From eca6a1494108f182a794430b154e347780f85ac9 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 16 Dec 2024 10:42:13 +0300 Subject: [PATCH 01/70] feat(tracing): Init gunicorn tracing --- examples/flask/charmcraft.yaml | 14 +++++++-- examples/flask/requirements.txt | 5 ++- examples/flask/src/charm.py | 18 ++++++++++- examples/flask/test_rock/app.py | 4 +++ examples/flask/test_rock/requirements.txt | 10 ++++++ examples/flask/test_rock/rockcraft.yaml | 7 +++++ src/paas_charm/_gunicorn/charm.py | 12 +++++-- src/paas_charm/_gunicorn/webserver.py | 38 +++++++++++++++++++++++ src/paas_charm/_gunicorn/wsgi_app.py | 3 ++ src/paas_charm/app.py | 8 +++++ src/paas_charm/charm.py | 29 +++++++++++++---- 11 files changed, 134 insertions(+), 14 deletions(-) diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index 292dba0..c73a3fe 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -57,9 +57,9 @@ config: type: string flask-secret-key-id: description: >- - This configuration is similar to `flask-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual Flask secret key. - To create the secret, run the following command: + This configuration is similar to `flask-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual Flask secret key. + To create the secret, run the following command: `juju add-secret my-flask-secret-key value= && juju grant-secret my-flask-secret-key flask-k8s`, and use the outputted secret ID to configure this option. type: secret @@ -130,6 +130,14 @@ requires: interface: rabbitmq optional: True limit: 1 + charm-tracing: + interface: tracing + limit: 1 + optional: true + workload-tracing: + interface: tracing + limit: 1 + optional: true resources: flask-app-image: diff --git a/examples/flask/requirements.txt b/examples/flask/requirements.txt index cd0f194..8058032 100644 --- a/examples/flask/requirements.txt +++ b/examples/flask/requirements.txt @@ -1,4 +1,7 @@ cosl jsonschema >=4.19,<4.20 ops >= 2.6 -pydantic==2.6.4 +# pydantic==2.6.4 +https://github.com/canonical/paas-charm/archive/async-workers.tar.gz +gunicorn +opentelemetry-distro diff --git a/examples/flask/src/charm.py b/examples/flask/src/charm.py index 1274e26..5a6e1c7 100755 --- a/examples/flask/src/charm.py +++ b/examples/flask/src/charm.py @@ -8,12 +8,14 @@ import typing import ops +from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm +from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer, charm_tracing_config import paas_charm.flask logger = logging.getLogger(__name__) - +@trace_charm(tracing_endpoint="charm_tracing_endpoint") class FlaskCharm(paas_charm.flask.Charm): """Flask Charm service.""" @@ -24,6 +26,20 @@ def __init__(self, *args: typing.Any) -> None: args: passthrough to CharmBase. 
""" super().__init__(*args) + self.charm_tracing = TracingEndpointRequirer(self, relation_name="charm-tracing", protocols=["otlp_http"]) + self.workload_tracing = TracingEndpointRequirer(self, relation_name="workload-tracing", protocols=["otlp_grpc"]) + if self.charm_tracing.is_ready(): + logger.info("```````````````: %s", self.charm_tracing.get_endpoint("otlp_http")) + if self.workload_tracing.is_ready(): + logger.info("```````````````: %s", self.workload_tracing.get_endpoint("otlp_grpc")) + # self.charm_tracing_endpoint, _ = charm_tracing_config(self.charm_tracing,None) + + @property + def charm_tracing_endpoint(self) -> str | None: + """Tempo endpoint for workload tracing""" + if self.charm_tracing.is_ready(): + return self.charm_tracing.get_endpoint("otlp_http") + return None if __name__ == "__main__": # pragma: nocover diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index 7d0b087..ef0e4d7 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -20,6 +20,8 @@ import redis from celery import Celery, Task from flask import Flask, g, jsonify, request +from opentelemetry import trace +from opentelemetry.instrumentation.flask import FlaskInstrumentor def hostname(): @@ -54,7 +56,9 @@ def __call__(self, *args: object, **kwargs: object) -> object: app = Flask(__name__) app.config.from_prefixed_env() +FlaskInstrumentor().instrument_app(app) +tracer = trace.get_tracer(__name__) broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") # Configure Celery only if Redis is configured celery_app = celery_init_app(app, broker_url) diff --git a/examples/flask/test_rock/requirements.txt b/examples/flask/test_rock/requirements.txt index 2ff69c0..7f29329 100644 --- a/examples/flask/test_rock/requirements.txt +++ b/examples/flask/test_rock/requirements.txt @@ -8,3 +8,13 @@ redis[hiredis] boto3 pika celery +googleapis-common-protos +grpcio +opentelemetry-api +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http +opentelemetry-instrumentation +opentelemetry-instrumentation-flask +opentelemetry-instrumentation-wsgi +opentelemetry-sdk +opentelemetry-distro diff --git a/examples/flask/test_rock/rockcraft.yaml b/examples/flask/test_rock/rockcraft.yaml index 8dcdf56..4c9565f 100644 --- a/examples/flask/test_rock/rockcraft.yaml +++ b/examples/flask/test_rock/rockcraft.yaml @@ -25,3 +25,10 @@ services: startup: enabled user: _daemon_ working-dir: /flask/app + +parts: + flask-framework/dependencies: + override-stage: | + pip install opentelemetry-distro + craftctl default + opentelemetry-bootstrap diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index 0273687..61235f1 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -2,13 +2,18 @@ # See LICENSE file for licensing details. 
"""The base charm class for all charms.""" + import logging -from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig +from cosl import JujuTopology +from ops.pebble import ExecError, ExecProcess + +from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig, WorkerClassEnum from paas_charm._gunicorn.workload_config import create_workload_config from paas_charm._gunicorn.wsgi_app import WsgiApp from paas_charm.app import App, WorkloadConfig from paas_charm.charm import PaasCharm +from paas_charm.exceptions import CharmConfigInvalidError logger = logging.getLogger(__name__) @@ -23,7 +28,7 @@ def _workload_config(self) -> WorkloadConfig: framework_name=self._framework_name, unit_name=self.unit.name ) - def _create_app(self) -> App: + def _create_app(self, topology: JujuTopology) -> App: """Build an App instance for the Gunicorn based charm. Returns: @@ -32,7 +37,7 @@ def _create_app(self) -> App: charm_state = self._create_charm_state() webserver = GunicornWebserver( - webserver_config=WebserverConfig.from_charm_config(dict(self.config)), + webserver_config=self.create_webserver_config(), workload_config=self._workload_config, container=self.unit.get_container(self._workload_config.container_name), ) @@ -43,4 +48,5 @@ def _create_app(self) -> App: workload_config=self._workload_config, webserver=webserver, database_migration=self._database_migration, + juju_topology=topology, ) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 854ed72..9dbcf9c 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -132,6 +132,13 @@ def _config(self) -> str: ) config = textwrap.dedent( f"""\ + from opentelemetry import trace + #from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter + from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + from opentelemetry.sdk.resources import Resource + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter + bind = ['0.0.0.0:{self._workload_config.port}'] chdir = {repr(str(self._workload_config.app_dir))} accesslog = {access_log} @@ -139,6 +146,37 @@ def _config(self) -> str: statsd_host = {repr(STATSD_HOST)} """ ) + config_2 = textwrap.dedent( + """\ + def post_fork(server, worker): + resource = Resource.create( + attributes={ + "service.name": "flask-k8s-app", + "compose_service": "flask-k8s-charm", + "charm_type": "FlaskCharm", + "worker": worker.pid, + "juju_application":"flask-k8s", + "juju_model":"flask-model", + "juju_model_uuid":"c66de66b-d52b-4d7b-8efe-d5b14cbd54a3", + "juju_unit":"flask-k8s/0", + "juju_charm":"flask-k8s", + } + ) + + trace.set_tracer_provider(TracerProvider(resource=resource)) + # This uses insecure connection for the purpose of example. Please see the + # OTLP Exporter documentation for other options. 
+ span_processor = BatchSpanProcessor( + OTLPSpanExporter(endpoint="http://grafana-agent-k8s-0.grafana-agent-k8s-endpoints.flask-model.svc.cluster.local:4318/v1/traces", + # OTLPSpanExporter(endpoint="grafana-agent-k8s-0.grafana-agent-k8s-endpoints.flask-model.svc.cluster.local:4317", + # insecure=True, + ) + ) + trace.get_tracer_provider().add_span_processor(span_processor) + + """ + ) + config += config_2 config += "\n".join(config_entries) return config diff --git a/src/paas_charm/_gunicorn/wsgi_app.py b/src/paas_charm/_gunicorn/wsgi_app.py index e5a5a20..94cc83a 100644 --- a/src/paas_charm/_gunicorn/wsgi_app.py +++ b/src/paas_charm/_gunicorn/wsgi_app.py @@ -6,6 +6,7 @@ import logging import ops +from cosl import JujuTopology from paas_charm._gunicorn.webserver import GunicornWebserver from paas_charm.app import App, WorkloadConfig @@ -26,6 +27,7 @@ def __init__( # pylint: disable=too-many-arguments workload_config: WorkloadConfig, database_migration: DatabaseMigration, webserver: GunicornWebserver, + juju_topology: JujuTopology, ): """Construct the WsgiApp instance. @@ -43,6 +45,7 @@ def __init__( # pylint: disable=too-many-arguments database_migration=database_migration, configuration_prefix=f"{workload_config.framework.upper()}_", framework_config_prefix=f"{workload_config.framework.upper()}_", + juju_topology=juju_topology ) self._webserver = webserver diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index 728abfc..9ab98f3 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -11,6 +11,7 @@ from typing import List import ops +from cosl import JujuTopology from paas_charm.charm_state import CharmState, IntegrationsState from paas_charm.database_migration import DatabaseMigration @@ -80,6 +81,7 @@ def __init__( # pylint: disable=too-many-arguments framework_config_prefix: str = "APP_", configuration_prefix: str = "APP_", integrations_prefix: str = "", + juju_topology: JujuTopology, ): """Construct the App instance. @@ -99,6 +101,7 @@ def __init__( # pylint: disable=too-many-arguments self.framework_config_prefix = framework_config_prefix self.configuration_prefix = configuration_prefix self.integrations_prefix = integrations_prefix + self._juju_topology = juju_topology def stop_all_services(self) -> None: """Stop all the services in the workload. 
@@ -152,6 +155,11 @@ def gen_environment(self) -> dict[str, str]: for k, v in framework_config.items() } ) + if self._juju_topology: + env["OTEL_RESOURCE_ATTRIBUTES"] = ( + f"juju_application={self._juju_topology.application},juju_model={self._juju_topology.model},juju_model_uuid={self._juju_topology.model_uuid},juju_unit={self._juju_topology.unit},juju_charm={self._juju_topology.charm_name}" + ) + env["OTEL_EXPORTER_OTLP_PROTOCOL"] = "grpc" if self._charm_state.base_url: env[f"{prefix}BASE_URL"] = self._charm_state.base_url diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 346a885..6f26dba 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -9,7 +9,10 @@ import ops from charms.data_platform_libs.v0.data_interfaces import DatabaseRequiresEvent from charms.redis_k8s.v0.redis import RedisRelationCharmEvents, RedisRequires + +# from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer +from cosl import JujuTopology from ops.model import Container from pydantic import BaseModel, ValidationError @@ -65,7 +68,7 @@ def _workload_config(self) -> WorkloadConfig: """Return an WorkloadConfig instance.""" @abc.abstractmethod - def _create_app(self) -> App: + def _create_app(self, juju_topology: JujuTopology) -> App: """Create an App instance.""" on = RedisRelationCharmEvents() @@ -82,8 +85,14 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._secret_storage = KeySecretStorage(charm=self, key=f"{framework_name}_secret_key") self._database_requirers = make_database_requirers(self, self.app.name) - - requires = self.framework.meta.requires + # self.tracing = TracingEndpointRequirer(self, + # protocols=['otlp_grpc', 'otlp_http'] + # ) + # if self.tracing.is_ready(): + # logger.info("=================== tracing[otlp_grpc]: %s", self.tracing.get_endpoint('otlp_grpc')) + # logger.info("=================== tracing[otlp_http]: %s", self.tracing.get_endpoint('otlp_http')) + + requires = self.framework.meta.requires ######************* if "redis" in requires and requires["redis"].interface_name == "redis": self._redis = RedisRequires(charm=self, relation_name="redis") self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) @@ -163,6 +172,8 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self.framework.observe( self.on[self._workload_config.container_name].pebble_ready, self._on_pebble_ready ) + self._topology = JujuTopology.from_charm(self) + logger.info("-----------: %s", str(self._topology)) def get_framework_config(self) -> BaseModel: """Return the framework related configurations. 
@@ -197,6 +208,9 @@ def _container(self) -> Container: @block_if_invalid_config def _on_config_changed(self, _: ops.EventBase) -> None: """Configure the application pebble service layer.""" + # if self.tracing.is_ready(): + # logger.info("=================== tracing[otlp_grpc]: %s", self.tracing.get_endpoint('otlp_grpc')) + # logger.info("=================== tracing[otlp_http]: %s", self.tracing.get_endpoint('otlp_http')) self.restart() @block_if_invalid_config @@ -257,7 +271,7 @@ def is_ready(self) -> bool: missing_integrations = self._missing_required_integrations(charm_state) if missing_integrations: - self._create_app().stop_all_services() + self._create_app(self._topology).stop_all_services() self._database_migration.set_status_to_pending() message = f"missing integrations: {', '.join(missing_integrations)}" logger.info(message) @@ -312,7 +326,7 @@ def restart(self, rerun_migrations: bool = False) -> None: return try: self.update_app_and_unit_status(ops.MaintenanceStatus("Preparing service for restart")) - self._create_app().restart() + self._create_app(self._topology).restart() except CharmConfigInvalidError as exc: self.update_app_and_unit_status(ops.BlockedStatus(exc.msg)) return @@ -328,7 +342,10 @@ def _gen_environment(self) -> dict[str, str]: Returns: A dictionary representing the application environment variables. """ - return self._create_app().gen_environment() + logger.info("-----------: %s", str(self._topology)) + env = self._create_app(self._topology).gen_environment() + + return env def _create_charm_state(self) -> CharmState: """Create charm state. From 0fabc4f7963a23c42601d9252b99b798f9993350 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 16 Dec 2024 10:43:04 +0300 Subject: [PATCH 02/70] chore(example): Change paas-charm branch in example flask --- examples/flask/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/flask/requirements.txt b/examples/flask/requirements.txt index 8058032..e327757 100644 --- a/examples/flask/requirements.txt +++ b/examples/flask/requirements.txt @@ -2,6 +2,6 @@ cosl jsonschema >=4.19,<4.20 ops >= 2.6 # pydantic==2.6.4 -https://github.com/canonical/paas-charm/archive/async-workers.tar.gz +https://github.com/canonical/paas-charm/archive/tempo-tracing.tar.gz gunicorn opentelemetry-distro From 7fa73fe771c7eb1cb58341a297e610c55975a781 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 16 Dec 2024 10:53:19 +0300 Subject: [PATCH 03/70] chore(example): Fix missing import and libraries --- .../tempo_coordinator_k8s/v0/charm_tracing.py | 1089 +++++++++++++++++ .../tempo_coordinator_k8s/v0/tracing.py | 1004 +++++++++++++++ examples/flask/requirements.txt | 4 +- src/paas_charm/_gunicorn/charm.py | 2 +- 4 files changed, 2095 insertions(+), 4 deletions(-) create mode 100644 examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py create mode 100644 examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py new file mode 100644 index 0000000..cf8def1 --- /dev/null +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py @@ -0,0 +1,1089 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""This charm library contains utilities to instrument your Charm with opentelemetry tracing data collection. + +(yes! charm code, not workload code!) 
+ +This means that, if your charm is related to, for example, COS' Tempo charm, you will be able to inspect +in real time from the Grafana dashboard the execution flow of your charm. + +# Quickstart +Fetch the following charm libs (and ensure the minimum version/revision numbers are satisfied): + + charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing # >= 1.10 + charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.charm_tracing # >= 2.7 + +Then edit your charm code to include: + +```python +# import the necessary charm libs +from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer, charm_tracing_config +from charms.tempo_coordinator_k8s.v0.charm_tracing import charm_tracing + +# decorate your charm class with charm_tracing: +@charm_tracing( + # forward-declare the instance attributes that the instrumentor will look up to obtain the + # tempo endpoint and server certificate + tracing_endpoint="tracing_endpoint", + server_cert="server_cert" +) +class MyCharm(CharmBase): + _path_to_cert = "/path/to/cert.crt" + # path to cert file **in the charm container**. Its presence will be used to determine whether + # the charm is ready to use tls for encrypting charm traces. If your charm does not support tls, + # you can ignore this and pass None to charm_tracing_config. + # If you do support TLS, you'll need to make sure that the server cert is copied to this location + # and kept up to date so the instrumentor can use it. + + def __init__(self, ...): + ... + self.tracing = TracingEndpointRequirer(self, ...) + self.tracing_endpoint, self.server_cert = charm_tracing_config(self.tracing, self._path_to_cert) +``` + +# Detailed usage +To use this library, you need to do two things: +1) decorate your charm class with + +`@trace_charm(tracing_endpoint="my_tracing_endpoint")` + +2) add to your charm a "my_tracing_endpoint" (you can name this attribute whatever you like) +**property**, **method** or **instance attribute** that returns an otlp http/https endpoint url. +If you are using the ``charms.tempo_coordinator_k8s.v0.tracing.TracingEndpointRequirer`` as +``self.tracing = TracingEndpointRequirer(self)``, the implementation could be: + +``` + @property + def my_tracing_endpoint(self) -> Optional[str]: + '''Tempo endpoint for charm tracing''' + if self.tracing.is_ready(): + return self.tracing.get_endpoint("otlp_http") + else: + return None +``` + +At this point your charm will be automatically instrumented so that: +- charm execution starts a trace, containing + - every event as a span (including custom events) + - every charm method call (except dunders) as a span + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + + +## TLS support +If your charm integrates with a TLS provider which is also trusted by the tracing provider (the Tempo charm), +you can configure ``charm_tracing`` to use TLS by passing a ``server_cert`` parameter to the decorator. + +If your charm is not trusting the same CA as the Tempo endpoint it is sending traces to, +you'll need to implement a cert-transfer relation to obtain the CA certificate from the same +CA that Tempo is using. 
+ +For example: +``` +from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm +@trace_charm( + tracing_endpoint="my_tracing_endpoint", + server_cert="_server_cert" +) +class MyCharm(CharmBase): + self._server_cert = "/path/to/server.crt" + ... + + def on_tls_changed(self, e) -> Optional[str]: + # update the server cert on the charm container for charm tracing + Path(self._server_cert).write_text(self.get_server_cert()) + + def on_tls_broken(self, e) -> Optional[str]: + # remove the server cert so charm_tracing won't try to use tls anymore + Path(self._server_cert).unlink() +``` + + +## More fine-grained manual instrumentation +if you wish to add more spans to the trace, you can do so by getting a hold of the tracer like so: +``` +import opentelemetry +... +def get_tracer(self) -> opentelemetry.trace.Tracer: + return opentelemetry.trace.get_tracer(type(self).__name__) +``` + +By default, the tracer is named after the charm type. If you wish to override that, you can pass +a different ``service_name`` argument to ``trace_charm``. + +See the official opentelemetry Python SDK documentation for usage: +https://opentelemetry-python.readthedocs.io/en/latest/ + + +## Caching traces +The `trace_charm` machinery will buffer any traces collected during charm execution and store them +to a file on the charm container until a tracing backend becomes available. At that point, it will +flush them to the tracing receiver. + +By default, the buffer is configured to start dropping old traces if any of these conditions apply: + +- the storage size exceeds 10 MiB +- the number of buffered events exceeds 100 + +You can configure this by, for example: + +```python +@trace_charm( + tracing_endpoint="my_tracing_endpoint", + server_cert="_server_cert", + # only cache up to 42 events + buffer_max_events=42, + # only cache up to 42 MiB + buffer_max_size_mib=42, # minimum 10! +) +class MyCharm(CharmBase): + ... +``` + +Note that setting `buffer_max_events` to 0 will effectively disable the buffer. + +The path of the buffer file is by default in the charm's execution root, which for k8s charms means +that in case of pod churn, the cache will be lost. The recommended solution is to use an existing storage +(or add a new one) such as: + +```yaml +storage: + data: + type: filesystem + location: /charm-traces +``` + +and then configure the `@trace_charm` decorator to use it as path for storing the buffer: +```python +@trace_charm( + tracing_endpoint="my_tracing_endpoint", + server_cert="_server_cert", + # store traces to a PVC so they're not lost on pod restart. + buffer_path="/charm-traces/buffer.file", +) +class MyCharm(CharmBase): + ... +``` + +## Upgrading from `v0` + +If you are upgrading from `charm_tracing` v0, you need to take the following steps (assuming you already +have the newest version of the library in your charm): +1) If you need the dependency for your tests, add the following dependency to your charm project +(or, if your project had a dependency on `opentelemetry-exporter-otlp-proto-grpc` only because +of `charm_tracing` v0, you can replace it with): + +`opentelemetry-exporter-otlp-proto-http>=1.21.0`. + +2) Update the charm method referenced to from ``@trace`` and ``@trace_charm``, +to return from ``TracingEndpointRequirer.get_endpoint("otlp_http")`` instead of ``grpc_http``. +For example: + +``` + from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + + @trace_charm( + tracing_endpoint="my_tracing_endpoint", + ) + class MyCharm(CharmBase): + + ... 
+ + @property + def my_tracing_endpoint(self) -> Optional[str]: + '''Tempo endpoint for charm tracing''' + if self.tracing.is_ready(): + return self.tracing.otlp_grpc_endpoint() # OLD API, DEPRECATED. + else: + return None +``` + +needs to be replaced with: + +``` + from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + + @trace_charm( + tracing_endpoint="my_tracing_endpoint", + ) + class MyCharm(CharmBase): + + ... + + @property + def my_tracing_endpoint(self) -> Optional[str]: + '''Tempo endpoint for charm tracing''' + if self.tracing.is_ready(): + return self.tracing.get_endpoint("otlp_http") # NEW API, use this. + else: + return None +``` + +3) If you were passing a certificate (str) using `server_cert`, you need to change it to +provide an *absolute* path to the certificate file instead. +""" +import typing + +from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import ( + encode_spans, +) +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter + + +def _remove_stale_otel_sdk_packages(): + """Hack to remove stale opentelemetry sdk packages from the charm's python venv. + + See https://github.com/canonical/grafana-agent-operator/issues/146 and + https://bugs.launchpad.net/juju/+bug/2058335 for more context. This patch can be removed after + this juju issue is resolved and sufficient time has passed to expect most users of this library + have migrated to the patched version of juju. When this patch is removed, un-ignore rule E402 for this file in the pyproject.toml (see setting + [tool.ruff.lint.per-file-ignores] in pyproject.toml). + + This only has an effect if executed on an upgrade-charm event. + """ + # all imports are local to keep this function standalone, side-effect-free, and easy to revert later + import os + + if os.getenv("JUJU_DISPATCH_PATH") != "hooks/upgrade-charm": + return + + import logging + import shutil + from collections import defaultdict + + from importlib_metadata import distributions + + otel_logger = logging.getLogger("charm_tracing_otel_patcher") + otel_logger.debug("Applying _remove_stale_otel_sdk_packages patch on charm upgrade") + # group by name all distributions starting with "opentelemetry_" + otel_distributions = defaultdict(list) + for distribution in distributions(): + name = distribution._normalized_name # type: ignore + if name.startswith("opentelemetry_"): + otel_distributions[name].append(distribution) + + otel_logger.debug(f"Found {len(otel_distributions)} opentelemetry distributions") + + # If we have multiple distributions with the same name, remove any that have 0 associated files + for name, distributions_ in otel_distributions.items(): + if len(distributions_) <= 1: + continue + + otel_logger.debug(f"Package {name} has multiple ({len(distributions_)}) distributions.") + for distribution in distributions_: + if not distribution.files: # Not None or empty list + path = distribution._path # type: ignore + otel_logger.info(f"Removing empty distribution of {name} at {path}.") + shutil.rmtree(path) + + otel_logger.debug("Successfully applied _remove_stale_otel_sdk_packages patch. ") + + +# apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. +# it could be trouble if someone ever decides to implement their own tracer parallel to +# ours and before the charm has inited. We assume they won't. 
+_remove_stale_otel_sdk_packages() + +import functools +import inspect +import logging +import os +from contextlib import contextmanager +from contextvars import Context, ContextVar, copy_context +from pathlib import Path +from typing import ( + Any, + Callable, + Generator, + List, + Optional, + Sequence, + Type, + TypeVar, + Union, + cast, +) + +import opentelemetry +import ops +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider +from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + SpanExporter, + SpanExportResult, +) +from opentelemetry.trace import INVALID_SPAN, Tracer +from opentelemetry.trace import get_current_span as otlp_get_current_span +from opentelemetry.trace import ( + get_tracer, + get_tracer_provider, + set_span_in_context, + set_tracer_provider, +) +from ops.charm import CharmBase +from ops.framework import Framework + +# The unique Charmhub library identifier, never change it +LIBID = "01780f1e588c42c3976d26780fdf9b89" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version + +LIBPATCH = 4 + +PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] + +logger = logging.getLogger("tracing") +dev_logger = logging.getLogger("tracing-dev") + +# set this to 0 if you are debugging/developing this library source +dev_logger.setLevel(logging.ERROR) + +_CharmType = Type[CharmBase] # the type CharmBase and any subclass thereof +_C = TypeVar("_C", bound=_CharmType) +_T = TypeVar("_T", bound=type) +_F = TypeVar("_F", bound=Type[Callable]) +tracer: ContextVar[Tracer] = ContextVar("tracer") +_GetterType = Union[Callable[[_CharmType], Optional[str]], property] + +CHARM_TRACING_ENABLED = "CHARM_TRACING_ENABLED" +BUFFER_DEFAULT_CACHE_FILE_NAME = ".charm_tracing_buffer.raw" +# we store the buffer as raw otlp-native protobuf (bytes) since it's hard to serialize/deserialize it in +# any portable format. Json dumping is supported, but loading isn't. +# cfr: https://github.com/open-telemetry/opentelemetry-python/issues/1003 + +BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB = 10 +_BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN = 10 +BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH = 100 +_MiB_TO_B = 2**20 # megabyte to byte conversion rate +_OTLP_SPAN_EXPORTER_TIMEOUT = 1 +"""Timeout in seconds that the OTLP span exporter has to push traces to the backend.""" + + +class _Buffer: + """Handles buffering for spans emitted while no tracing backend is configured or available. + + Use the max_event_history_length_buffering param of @trace_charm to tune + the amount of memory that this will hog on your units. + + The buffer is formatted as a bespoke byte dump (protobuf limitation). + We cannot store them as json because that is not well-supported by the sdk + (see https://github.com/open-telemetry/opentelemetry-python/issues/3364). 
+ """ + + _SPANSEP = b"__CHARM_TRACING_BUFFER_SPAN_SEP__" + + def __init__(self, db_file: Path, max_event_history_length: int, max_buffer_size_mib: int): + self._db_file = db_file + self._max_event_history_length = max_event_history_length + self._max_buffer_size_mib = max(max_buffer_size_mib, _BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN) + + # set by caller + self.exporter: Optional[OTLPSpanExporter] = None + + def save(self, spans: typing.Sequence[ReadableSpan]): + """Save the spans collected by this exporter to the cache file. + + This method should be as fail-safe as possible. + """ + if self._max_event_history_length < 1: + dev_logger.debug("buffer disabled: max history length < 1") + return + + current_history_length = len(self.load()) + new_history_length = current_history_length + len(spans) + if (diff := self._max_event_history_length - new_history_length) < 0: + self.drop(diff) + self._save(spans) + + def _serialize(self, spans: Sequence[ReadableSpan]) -> bytes: + # encode because otherwise we can't json-dump them + return encode_spans(spans).SerializeToString() + + def _save(self, spans: Sequence[ReadableSpan], replace: bool = False): + dev_logger.debug(f"saving {len(spans)} new spans to buffer") + old = [] if replace else self.load() + new = self._serialize(spans) + + try: + # if the buffer exceeds the size limit, we start dropping old spans until it does + + while len((new + self._SPANSEP.join(old))) > (self._max_buffer_size_mib * _MiB_TO_B): + if not old: + # if we've already dropped all spans and still we can't get under the + # size limit, we can't save this span + logger.error( + f"span exceeds total buffer size limit ({self._max_buffer_size_mib}MiB); " + f"buffering FAILED" + ) + return + + old = old[1:] + logger.warning( + f"buffer size exceeds {self._max_buffer_size_mib}MiB; dropping older spans... " + f"Please increase the buffer size, disable buffering, or ensure the spans can be flushed." + ) + + self._db_file.write_bytes(new + self._SPANSEP.join(old)) + except Exception: + logger.exception("error buffering spans") + + def load(self) -> List[bytes]: + """Load currently buffered spans from the cache file. + + This method should be as fail-safe as possible. + """ + if not self._db_file.exists(): + dev_logger.debug("buffer file not found. buffer empty.") + return [] + try: + spans = self._db_file.read_bytes().split(self._SPANSEP) + except Exception: + logger.exception(f"error parsing {self._db_file}") + return [] + return spans + + def drop(self, n_spans: Optional[int] = None): + """Drop some currently buffered spans from the cache file.""" + current = self.load() + if n_spans: + dev_logger.debug(f"dropping {n_spans} spans from buffer") + new = current[n_spans:] + else: + dev_logger.debug("emptying buffer") + new = [] + + self._db_file.write_bytes(self._SPANSEP.join(new)) + + def flush(self) -> Optional[bool]: + """Export all buffered spans to the given exporter, then clear the buffer. + + Returns whether the flush was successful, and None if there was nothing to flush. 
+ """ + if not self.exporter: + dev_logger.debug("no exporter set; skipping buffer flush") + return False + + buffered_spans = self.load() + if not buffered_spans: + dev_logger.debug("nothing to flush; buffer empty") + return None + + errors = False + for span in buffered_spans: + try: + out = self.exporter._export(span) # type: ignore + if not (200 <= out.status_code < 300): + # take any 2xx status code as a success + errors = True + except ConnectionError: + dev_logger.debug( + "failed exporting buffered span; backend might be down or still starting" + ) + errors = True + except Exception: + logger.exception("unexpected error while flushing span batch from buffer") + errors = True + + if not errors: + self.drop() + else: + logger.error("failed flushing spans; buffer preserved") + return not errors + + @property + def is_empty(self): + """Utility to check whether the buffer has any stored spans. + + This is more efficient than attempting a load() given how large the buffer might be. + """ + return (not self._db_file.exists()) or (self._db_file.stat().st_size == 0) + + +class _OTLPSpanExporter(OTLPSpanExporter): + """Subclass of OTLPSpanExporter to configure the max retry timeout, so that it fails a bit faster.""" + + # The issue we're trying to solve is that the model takes AGES to settle if e.g. tls is misconfigured, + # as every hook of a charm_tracing-instrumented charm takes about a minute to exit, as the charm can't + # flush the traces and keeps retrying for 'too long' + + _MAX_RETRY_TIMEOUT = 4 + # we give the exporter 4 seconds in total to succeed pushing the traces to tempo + # if it fails, we'll be caching the data in the buffer and flush it the next time, so there's no data loss risk. + # this means 2/3 retries (hard to guess from the implementation) and up to ~7 seconds total wait + + +class _BufferedExporter(InMemorySpanExporter): + def __init__(self, buffer: _Buffer) -> None: + super().__init__() + self._buffer = buffer + + def export(self, spans: typing.Sequence[ReadableSpan]) -> SpanExportResult: + self._buffer.save(spans) + return super().export(spans) + + def force_flush(self, timeout_millis: int = 0) -> bool: + # parent implementation is fake, so the timeout_millis arg is not doing anything. + result = super().force_flush(timeout_millis) + self._buffer.save(self.get_finished_spans()) + return result + + +def is_enabled() -> bool: + """Whether charm tracing is enabled.""" + return os.getenv(CHARM_TRACING_ENABLED, "1") == "1" + + +@contextmanager +def charm_tracing_disabled(): + """Contextmanager to temporarily disable charm tracing. + + For usage in tests. + """ + previous = os.getenv(CHARM_TRACING_ENABLED, "1") + os.environ[CHARM_TRACING_ENABLED] = "0" + yield + os.environ[CHARM_TRACING_ENABLED] = previous + + +def get_current_span() -> Union[Span, None]: + """Return the currently active Span, if there is one, else None. + + If you'd rather keep your logic unconditional, you can use opentelemetry.trace.get_current_span, + which will return an object that behaves like a span but records no data. 
+ """ + span = otlp_get_current_span() + if span is INVALID_SPAN: + return None + return cast(Span, span) + + +def _get_tracer_from_context(ctx: Context) -> Optional[ContextVar]: + tracers = [v for v in ctx if v is not None and v.name == "tracer"] + if tracers: + return tracers[0] + return None + + +def _get_tracer() -> Optional[Tracer]: + """Find tracer in context variable and as a fallback locate it in the full context.""" + try: + return tracer.get() + except LookupError: + # fallback: this course-corrects for a user error where charm_tracing symbols are imported + # from different paths (typically charms.tempo_coordinator_k8s... and lib.charms.tempo_coordinator_k8s...) + try: + ctx: Context = copy_context() + if context_tracer := _get_tracer_from_context(ctx): + logger.warning( + "Tracer not found in `tracer` context var. " + "Verify that you're importing all `charm_tracing` symbols from the same module path. \n" + "For example, DO" + ": `from charms.lib...charm_tracing import foo, bar`. \n" + "DONT: \n" + " \t - `from charms.lib...charm_tracing import foo` \n" + " \t - `from lib...charm_tracing import bar` \n" + "For more info: https://python-notes.curiousefficiency.org/en/latest/python" + "_concepts/import_traps.html#the-double-import-trap" + ) + return context_tracer.get() + else: + return None + except LookupError: + return None + + +@contextmanager +def _span(name: str) -> Generator[Optional[Span], Any, Any]: + """Context to create a span if there is a tracer, otherwise do nothing.""" + if tracer := _get_tracer(): + with tracer.start_as_current_span(name) as span: + yield cast(Span, span) + else: + yield None + + +class TracingError(RuntimeError): + """Base class for errors raised by this module.""" + + +class UntraceableObjectError(TracingError): + """Raised when an object you're attempting to instrument cannot be autoinstrumented.""" + + +def _get_tracing_endpoint( + tracing_endpoint_attr: str, + charm_instance: object, + charm_type: type, +): + _tracing_endpoint = getattr(charm_instance, tracing_endpoint_attr) + if callable(_tracing_endpoint): + tracing_endpoint = _tracing_endpoint() + else: + tracing_endpoint = _tracing_endpoint + + if tracing_endpoint is None: + return + + elif not isinstance(tracing_endpoint, str): + raise TypeError( + f"{charm_type.__name__}.{tracing_endpoint_attr} should resolve to a tempo endpoint (string); " + f"got {tracing_endpoint} instead." + ) + + dev_logger.debug(f"Setting up span exporter to endpoint: {tracing_endpoint}/v1/traces") + return f"{tracing_endpoint}/v1/traces" + + +def _get_server_cert( + server_cert_attr: str, + charm_instance: ops.CharmBase, + charm_type: Type[ops.CharmBase], +): + _server_cert = getattr(charm_instance, server_cert_attr) + if callable(_server_cert): + server_cert = _server_cert() + else: + server_cert = _server_cert + + if server_cert is None: + logger.warning( + f"{charm_type}.{server_cert_attr} is None; sending traces over INSECURE connection." + ) + return + elif not Path(server_cert).is_absolute(): + raise ValueError( + f"{charm_type}.{server_cert_attr} should resolve to a valid tls cert absolute path (string | Path)); " + f"got {server_cert} instead." 
+ ) + return server_cert + + +def _setup_root_span_initializer( + charm_type: _CharmType, + tracing_endpoint_attr: str, + server_cert_attr: Optional[str], + service_name: Optional[str], + buffer_path: Optional[Path], + buffer_max_events: int, + buffer_max_size_mib: int, +): + """Patch the charm's initializer.""" + original_init = charm_type.__init__ + + @functools.wraps(original_init) + def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): + # we're using 'self' here because this is charm init code, makes sense to read what's below + # from the perspective of the charm. Self.unit.name... + + original_init(self, framework, *args, **kwargs) + # we call this from inside the init context instead of, say, _autoinstrument, because we want it to + # be checked on a per-charm-instantiation basis, not on a per-type-declaration one. + if not is_enabled(): + # this will only happen during unittesting, hopefully, so it's fine to log a + # bit more verbosely + logger.info("Tracing DISABLED: skipping root span initialization") + return + + original_event_context = framework._event_context + # default service name isn't just app name because it could conflict with the workload service name + _service_name = service_name or f"{self.app.name}-charm" + + unit_name = self.unit.name + resource = Resource.create( + attributes={ + "service.name": _service_name, + "compose_service": _service_name, + "charm_type": type(self).__name__, + # juju topology + "juju_unit": unit_name, + "juju_application": self.app.name, + "juju_model": self.model.name, + "juju_model_uuid": self.model.uuid, + } + ) + provider = TracerProvider(resource=resource) + + # if anything goes wrong with retrieving the endpoint, we let the exception bubble up. + tracing_endpoint = _get_tracing_endpoint(tracing_endpoint_attr, self, charm_type) + + buffer_only = False + # whether we're only exporting to buffer, or also to the otlp exporter. + + if not tracing_endpoint: + # tracing is off if tracing_endpoint is None + # however we can buffer things until tracing comes online + buffer_only = True + + server_cert: Optional[Union[str, Path]] = ( + _get_server_cert(server_cert_attr, self, charm_type) if server_cert_attr else None + ) + + if (tracing_endpoint and tracing_endpoint.startswith("https://")) and not server_cert: + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. This span will be buffered." 
+ ) + buffer_only = True + + buffer = _Buffer( + db_file=buffer_path or Path() / BUFFER_DEFAULT_CACHE_FILE_NAME, + max_event_history_length=buffer_max_events, + max_buffer_size_mib=buffer_max_size_mib, + ) + previous_spans_buffered = not buffer.is_empty + + exporters: List[SpanExporter] = [] + if buffer_only: + # we have to buffer because we're missing necessary backend configuration + dev_logger.debug("buffering mode: ON") + exporters.append(_BufferedExporter(buffer)) + + else: + dev_logger.debug("buffering mode: FALLBACK") + # in principle, we have the right configuration to be pushing traces, + # but if we fail for whatever reason, we will put everything in the buffer + # and retry the next time + otlp_exporter = _OTLPSpanExporter( + endpoint=tracing_endpoint, + certificate_file=str(Path(server_cert).absolute()) if server_cert else None, + timeout=_OTLP_SPAN_EXPORTER_TIMEOUT, # give individual requests 1 second to succeed + ) + exporters.append(otlp_exporter) + exporters.append(_BufferedExporter(buffer)) + buffer.exporter = otlp_exporter + + for exporter in exporters: + processor = BatchSpanProcessor(exporter) + provider.add_span_processor(processor) + + set_tracer_provider(provider) + _tracer = get_tracer(_service_name) # type: ignore + _tracer_token = tracer.set(_tracer) + + dispatch_path = os.getenv("JUJU_DISPATCH_PATH", "") # something like hooks/install + event_name = dispatch_path.split("/")[1] if "/" in dispatch_path else dispatch_path + root_span_name = f"{unit_name}: {event_name} event" + span = _tracer.start_span(root_span_name, attributes={"juju.dispatch_path": dispatch_path}) + + # all these shenanigans are to work around the fact that the opentelemetry tracing API is built + # on the assumption that spans will be used as contextmanagers. + # Since we don't (as we need to close the span on framework.commit), + # we need to manually set the root span as current. + ctx = set_span_in_context(span) + + # log a trace id, so we can pick it up from the logs (and jhack) to look it up in tempo. + root_trace_id = hex(span.get_span_context().trace_id)[2:] # strip 0x prefix + logger.debug(f"Starting root trace with id={root_trace_id!r}.") + + span_token = opentelemetry.context.attach(ctx) # type: ignore + + @contextmanager + def wrap_event_context(event_name: str): + dev_logger.debug(f"entering event context: {event_name}") + # when the framework enters an event context, we create a span. + with _span("event: " + event_name) as event_context_span: + if event_context_span: + # todo: figure out how to inject event attrs in here + event_context_span.add_event(event_name) + yield original_event_context(event_name) + + framework._event_context = wrap_event_context # type: ignore + + original_close = framework.close + + @functools.wraps(original_close) + def wrap_close(): + dev_logger.debug("tearing down tracer and flushing traces") + span.end() + opentelemetry.context.detach(span_token) # type: ignore + tracer.reset(_tracer_token) + tp = cast(TracerProvider, get_tracer_provider()) + flush_successful = tp.force_flush(timeout_millis=1000) # don't block for too long + + if buffer_only: + # if we're in buffer_only mode, it means we couldn't even set up the exporter for + # tempo as we're missing some data. 
+ # so attempting to flush the buffer doesn't make sense + dev_logger.debug("tracing backend unavailable: all spans pushed to buffer") + + else: + dev_logger.debug("tracing backend found: attempting to flush buffer...") + + # if we do have an exporter for tempo, and we could send traces to it, + # we can attempt to flush the buffer as well. + if not flush_successful: + logger.error("flushing FAILED: unable to push traces to backend.") + else: + dev_logger.debug("flush succeeded.") + + # the backend has accepted the spans generated during this event, + if not previous_spans_buffered: + # if the buffer was empty to begin with, any spans we collected now can be discarded + buffer.drop() + dev_logger.debug("buffer dropped: this trace has been sent already") + else: + # if the buffer was nonempty, we can attempt to flush it + dev_logger.debug("attempting buffer flush...") + buffer_flush_successful = buffer.flush() + if buffer_flush_successful: + dev_logger.debug("buffer flush OK") + elif buffer_flush_successful is None: + # TODO is this even possible? + dev_logger.debug("buffer flush OK; empty: nothing to flush") + else: + # this situation is pretty weird, I'm not even sure it can happen, + # because it would mean that we did manage + # to push traces directly to the tempo exporter (flush_successful), + # but the buffer flush failed to push to the same exporter! + logger.error("buffer flush FAILED") + + tp.shutdown() + original_close() + + framework.close = wrap_close + return + + charm_type.__init__ = wrap_init # type: ignore + + +def trace_charm( + tracing_endpoint: str, + server_cert: Optional[str] = None, + service_name: Optional[str] = None, + extra_types: Sequence[type] = (), + buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, + buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, + buffer_path: Optional[Union[str, Path]] = None, +) -> Callable[[_T], _T]: + """Autoinstrument the decorated charm with tracing telemetry. + + Use this function to get out-of-the-box traces for all events emitted on this charm and all + method calls on instances of this class. + + Usage: + >>> from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + >>> from ops import CharmBase + >>> + >>> @trace_charm( + >>> tracing_endpoint="tempo_otlp_http_endpoint", + >>> ) + >>> class MyCharm(CharmBase): + >>> + >>> def __init__(self, framework: Framework): + >>> ... + >>> self.tracing = TracingEndpointRequirer(self) + >>> + >>> @property + >>> def tempo_otlp_http_endpoint(self) -> Optional[str]: + >>> if self.tracing.is_ready(): + >>> return self.tracing.otlp_http_endpoint() + >>> else: + >>> return None + >>> + + :param tracing_endpoint: name of a method, property or attribute on the charm type that returns an + optional (fully resolvable) tempo url to which the charm traces will be pushed. + If None, tracing will be effectively disabled. + :param server_cert: name of a method, property or attribute on the charm type that returns an + optional absolute path to a CA certificate file to be used when sending traces to a remote server. + If it returns None, an _insecure_ connection will be used. To avoid errors in transient + situations where the endpoint is already https but there is no certificate on disk yet, it + is recommended to disable tracing (by returning None from the tracing_endpoint) altogether + until the cert has been written to disk. 
+ :param service_name: service name tag to attach to all traces generated by this charm. + Defaults to the juju application name this charm is deployed under. + :param extra_types: pass any number of types that you also wish to autoinstrument. + For example, charm libs, relation endpoint wrappers, workload abstractions, ... + :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. + :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. + Minimum 10MiB. + :param buffer_path: path to buffer file to use for saving buffered spans. + """ + + def _decorator(charm_type: _T) -> _T: + """Autoinstrument the wrapped charmbase type.""" + _autoinstrument( + charm_type, + tracing_endpoint_attr=tracing_endpoint, + server_cert_attr=server_cert, + service_name=service_name, + extra_types=extra_types, + buffer_path=Path(buffer_path) if buffer_path else None, + buffer_max_size_mib=buffer_max_size_mib, + buffer_max_events=buffer_max_events, + ) + return charm_type + + return _decorator + + +def _autoinstrument( + charm_type: _T, + tracing_endpoint_attr: str, + server_cert_attr: Optional[str] = None, + service_name: Optional[str] = None, + extra_types: Sequence[type] = (), + buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, + buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, + buffer_path: Optional[Path] = None, +) -> _T: + """Set up tracing on this charm class. + + Use this function to get out-of-the-box traces for all events emitted on this charm and all + method calls on instances of this class. + + Usage: + + >>> from charms.tempo_coordinator_k8s.v0.charm_tracing import _autoinstrument + >>> from ops.main import main + >>> _autoinstrument( + >>> MyCharm, + >>> tracing_endpoint_attr="tempo_otlp_http_endpoint", + >>> service_name="MyCharm", + >>> extra_types=(Foo, Bar) + >>> ) + >>> main(MyCharm) + + :param charm_type: the CharmBase subclass to autoinstrument. + :param tracing_endpoint_attr: name of a method, property or attribute on the charm type that returns an + optional (fully resolvable) tempo url to which the charm traces will be pushed. + If None, tracing will be effectively disabled. + :param server_cert_attr: name of a method, property or attribute on the charm type that returns an + optional absolute path to a CA certificate file to be used when sending traces to a remote server. + If it returns None, an _insecure_ connection will be used. To avoid errors in transient + situations where the endpoint is already https but there is no certificate on disk yet, it + is recommended to disable tracing (by returning None from the tracing_endpoint) altogether + until the cert has been written to disk. + :param service_name: service name tag to attach to all traces generated by this charm. + Defaults to the juju application name this charm is deployed under. + :param extra_types: pass any number of types that you also wish to autoinstrument. + For example, charm libs, relation endpoint wrappers, workload abstractions, ... + :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. + :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. + Minimum 10MiB. + :param buffer_path: path to buffer file to use for saving buffered spans. 
+ """ + dev_logger.debug(f"instrumenting {charm_type}") + _setup_root_span_initializer( + charm_type, + tracing_endpoint_attr, + server_cert_attr=server_cert_attr, + service_name=service_name, + buffer_path=buffer_path, + buffer_max_events=buffer_max_events, + buffer_max_size_mib=buffer_max_size_mib, + ) + trace_type(charm_type) + for type_ in extra_types: + trace_type(type_) + + return charm_type + + +def trace_type(cls: _T) -> _T: + """Set up tracing on this class. + + Use this decorator to get out-of-the-box traces for all method calls on instances of this class. + It assumes that this class is only instantiated after a charm type decorated with `@trace_charm` + has been instantiated. + """ + dev_logger.debug(f"instrumenting {cls}") + for name, method in inspect.getmembers(cls, predicate=inspect.isfunction): + dev_logger.debug(f"discovered {method}") + + if method.__name__.startswith("__"): + dev_logger.debug(f"skipping {method} (dunder)") + continue + + # the span title in the general case should be: + # method call: MyCharmWrappedMethods.b + # if the method has a name (functools.wrapped or regular method), let + # _trace_callable use its default algorithm to determine what name to give the span. + trace_method_name = None + try: + qualname_c0 = method.__qualname__.split(".")[0] + if not hasattr(cls, method.__name__): + # if the callable doesn't have a __name__ (probably a decorated method), + # it probably has a bad qualname too (such as my_decorator..wrapper) which is not + # great for finding out what the trace is about. So we use the method name instead and + # add a reference to the decorator name. Result: + # method call: @my_decorator(MyCharmWrappedMethods.b) + trace_method_name = f"@{qualname_c0}({cls.__name__}.{name})" + except Exception: # noqa: failsafe + pass + + new_method = trace_method(method, name=trace_method_name) + + if isinstance(inspect.getattr_static(cls, name), staticmethod): + new_method = staticmethod(new_method) + setattr(cls, name, new_method) + + return cls + + +def trace_method(method: _F, name: Optional[str] = None) -> _F: + """Trace this method. + + A span will be opened when this method is called and closed when it returns. + """ + return _trace_callable(method, "method", name=name) + + +def trace_function(function: _F, name: Optional[str] = None) -> _F: + """Trace this function. + + A span will be opened when this function is called and closed when it returns. + """ + return _trace_callable(function, "function", name=name) + + +def _trace_callable(callable: _F, qualifier: str, name: Optional[str] = None) -> _F: + dev_logger.debug(f"instrumenting {callable}") + + # sig = inspect.signature(callable) + @functools.wraps(callable) + def wrapped_function(*args, **kwargs): # type: ignore + name_ = name or getattr( + callable, "__qualname__", getattr(callable, "__name__", str(callable)) + ) + with _span(f"{qualifier} call: {name_}"): # type: ignore + return callable(*args, **kwargs) # type: ignore + + # wrapped_function.__signature__ = sig + return wrapped_function # type: ignore + + +def trace(obj: Union[Type, Callable]): + """Trace this object and send the resulting spans to Tempo. + + It will dispatch to ``trace_type`` if the decorated object is a class, otherwise + ``trace_function``. 
+ """ + if isinstance(obj, type): + if issubclass(obj, CharmBase): + raise ValueError( + "cannot use @trace on CharmBase subclasses: use @trace_charm instead " + "(we need some arguments!)" + ) + return trace_type(obj) + else: + try: + return trace_function(obj) + except Exception: + raise UntraceableObjectError( + f"cannot create span from {type(obj)}; instrument {obj} manually." + ) diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py new file mode 100644 index 0000000..2035dff --- /dev/null +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -0,0 +1,1004 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +"""## Overview. + +This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a +tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm +may maintain the same interface and be backward compatible with all currently integrated charms. + +## Requirer Library Usage + +Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` +object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` +object only requires instantiating it, typically in the constructor of your charm. The +`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint + is exposed by the Tempo charm, and a list of protocols it intends to send traces with. + This relation must use the `tracing` interface. + The `TracingEndpointRequirer` object may be instantiated as follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointRequirer(self, + protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] + ) + # ... + +Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the +parent charm. + +Alternatively to providing the list of requested protocols at init time, the charm can do it at +any point in time by calling the +`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. +Using this method also allows you to use per-relation protocols. + +Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling +`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: +- `otlp_grpc` +- `otlp_http` +- `zipkin` +- `tempo` + +If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, +the library will raise an error. + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + +## Provider Library Usage + +The `TracingEndpointProvider` object may be used by charms to manage relations with their +trace sources. For this purposes a Tempo-like charm needs to do two things + +1. Instantiate the `TracingEndpointProvider` object by providing it a +reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm +uses to interact with its trace sources. 
This relation must conform to the `tracing` interface +and it is strongly recommended that this relation be named `tracing` which is its +default value. + +For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as +follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointProvider(self) + # ... + + + +""" # noqa: W505 +import enum +import json +import logging +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import pydantic +from ops.charm import ( + CharmBase, + CharmEvents, + RelationBrokenEvent, + RelationEvent, + RelationRole, +) +from ops.framework import EventSource, Object +from ops.model import ModelError, Relation +from pydantic import BaseModel, Field + +# The unique Charmhub library identifier, never change it +LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 3 + +PYDEPS = ["pydantic"] + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "tracing" +RELATION_INTERFACE_NAME = "tracing" + +# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 +ReceiverProtocol = Literal[ + "zipkin", + "otlp_grpc", + "otlp_http", + "jaeger_grpc", + "jaeger_thrift_http", +] + +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + +BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} + + +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { + "zipkin": TransportProtocolType.http, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, + "jaeger_thrift_http": TransportProtocolType.http, + "jaeger_grpc": TransportProtocolType.grpc, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. 
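For example, a provider might publish raw receivers such as the following sketch
(hostname and ports are placeholders, not values defined by this library):

    # List[RawReceiver]: (protocol name, resolvable URL) pairs
    raw_receivers = [
        ("otlp_http", "http://tempo.example.local:4318"),
        ("otlp_grpc", "tempo.example.local:4317"),  # grpc transport, so the URL carries no scheme
    ]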
+""" + + +class TracingError(Exception): + """Base class for custom errors raised by this library.""" + + +class NotReadyError(TracingError): + """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" + + +class ProtocolNotRequestedError(TracingError): + """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" + + +class DataValidationError(TracingError): + """Raised when data validation fails on IPU relation data.""" + + +class AmbiguousRelationUsageError(TracingError): + """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" + + +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class DatabagModel(BaseModel): # type: ignore + """Base databag model.""" + + class Config: + """Pydantic config.""" + + # ignore any extra fields in the databag + extra = "ignore" + """Ignore any extra fields in the databag.""" + allow_population_by_field_name = True + """Allow instantiating this class by field name (instead of forcing alias).""" + + _NEST_UNDER = None + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + if cls._NEST_UNDER: + return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {f.alias for f in cls.__fields__.values()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.parse_raw(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + + if self._NEST_UNDER: + databag[self._NEST_UNDER] = self.json(by_alias=True) + return databag + + dct = self.dict() + for key, field in self.__fields__.items(): # type: ignore + value = dct[key] + databag[field.alias or key] = json.dumps(value) + + return databag + +else: + from pydantic import ConfigDict + + class DatabagModel(BaseModel): + """Base databag model.""" + + model_config = ConfigDict( + # ignore any extra fields in the databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, # type: ignore + ) + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) # type: ignore + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.__fields__.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. 
{databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( # type: ignore + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump() # type: ignore + for key, field in self.model_fields.items(): # type: ignore + value = dct[key] + if value == field.default: + continue + databag[field.alias or key] = json.dumps(value) + + return databag + + +# todo use models from charm-relation-interfaces +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" + + class Config: + """Pydantic config.""" + + use_enum_values = True + """Allow serializing enum values.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( # type: ignore + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + + +class Receiver(BaseModel): + """Specification of an active receiver.""" + + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. + If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" + + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) + + +class TracingRequirerAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing requirer.""" + + receivers: List[ReceiverProtocol] + """Requested receivers.""" + + +class _AutoSnapshotEvent(RelationEvent): + __args__: Tuple[str, ...] 
= () + __optional_kwargs__: Dict[str, Any] = {} + + @classmethod + def __attrs__(cls): + return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) + + def __init__(self, handle, relation, *args, **kwargs): + super().__init__(handle, relation) + + if not len(self.__args__) == len(args): + raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) + + for attr, obj in zip(self.__args__, args): + setattr(self, attr, obj) + for attr, default in self.__optional_kwargs__.items(): + obj = kwargs.get(attr, default) + setattr(self, attr, obj) + + def snapshot(self) -> dict: + dct = super().snapshot() + for attr in self.__attrs__(): + obj = getattr(self, attr) + try: + dct[attr] = obj + except ValueError as e: + raise ValueError( + "cannot automagically serialize {}: " + "override this method and do it " + "manually.".format(obj) + ) from e + + return dct + + def restore(self, snapshot: dict) -> None: + super().restore(snapshot) + for attr, obj in snapshot.items(): + setattr(self, attr, obj) + + +class RelationNotFoundError(Exception): + """Raised if no relation with the given name is found.""" + + def __init__(self, relation_name: str): + self.relation_name = relation_name + self.message = "No relation named '{}' found".format(relation_name) + super().__init__(self.message) + + +class RelationInterfaceMismatchError(Exception): + """Raised if the relation with the given name has an unexpected interface.""" + + def __init__( + self, + relation_name: str, + expected_relation_interface: str, + actual_relation_interface: str, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_interface + self.actual_relation_interface = actual_relation_interface + self.message = ( + "The '{}' relation has '{}' as interface rather than the expected '{}'".format( + relation_name, actual_relation_interface, expected_relation_interface + ) + ) + + super().__init__(self.message) + + +class RelationRoleMismatchError(Exception): + """Raised if the relation with the given name has a different role than expected.""" + + def __init__( + self, + relation_name: str, + expected_relation_role: RelationRole, + actual_relation_role: RelationRole, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_role + self.actual_relation_role = actual_relation_role + self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( + relation_name, repr(actual_relation_role), repr(expected_relation_role) + ) + + super().__init__(self.message) + + +def _validate_relation_by_interface_and_direction( + charm: CharmBase, + relation_name: str, + expected_relation_interface: str, + expected_relation_role: RelationRole, +): + """Validate a relation. + + Verifies that the `relation_name` provided: (1) exists in metadata.yaml, + (2) declares as interface the interface name passed as `relation_interface` + and (3) has the right "direction", i.e., it is a relation that `charm` + provides or requires. + + Args: + charm: a `CharmBase` object to scan for the matching relation. + relation_name: the name of the relation to be verified. + expected_relation_interface: the interface name to be matched by the + relation named `relation_name`. + expected_relation_role: whether the `relation_name` must be either + provided or required by `charm`. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. 
+ RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the same relation interface + as specified via the `expected_relation_interface` argument. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the same role as specified + via the `expected_relation_role` argument. + """ + if relation_name not in charm.meta.relations: + raise RelationNotFoundError(relation_name) + + relation = charm.meta.relations[relation_name] + + # fixme: why do we need to cast here? + actual_relation_interface = cast(str, relation.interface_name) + + if actual_relation_interface != expected_relation_interface: + raise RelationInterfaceMismatchError( + relation_name, expected_relation_interface, actual_relation_interface + ) + + if expected_relation_role is RelationRole.provides: + if relation_name not in charm.meta.provides: + raise RelationRoleMismatchError( + relation_name, RelationRole.provides, RelationRole.requires + ) + elif expected_relation_role is RelationRole.requires: + if relation_name not in charm.meta.requires: + raise RelationRoleMismatchError( + relation_name, RelationRole.requires, RelationRole.provides + ) + else: + raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) + + +class RequestEvent(RelationEvent): + """Event emitted when a remote requests a tracing endpoint.""" + + @property + def requested_receivers(self) -> List[ReceiverProtocol]: + """List of receiver protocols that have been requested.""" + relation = self.relation + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + return TracingRequirerAppData.load(relation.data[app]).receivers + + +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + +class TracingEndpointProviderEvents(CharmEvents): + """TracingEndpointProvider events.""" + + request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) + + +class TracingEndpointProvider(Object): + """Class representing a trace receiver service.""" + + on = TracingEndpointProviderEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + external_url: Optional[str] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ): + """Initialize. + + Args: + charm: a `CharmBase` instance that manages this instance of the Tempo service. + external_url: external address of the node hosting the tempo server, + if an ingress is present. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.requires` + role. 
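        A hedged sketch of the provider-side flow (the handler and attribute names are
        hypothetical and the receiver URL is a placeholder): a Tempo-like charm typically
        observes the ``request`` event and answers it by publishing receivers.

            def _on_tracing_request(self, event: RequestEvent) -> None:
                # publish_receivers is leader-only and raises RuntimeError otherwise.
                if not self.unit.is_leader():
                    return
                logger.debug("requested receivers: %s", event.requested_receivers)
                self.tracing_provider.publish_receivers(
                    [("otlp_http", "http://tempo.example.local:4318")]
                )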
+ """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides + ) + + super().__init__(charm, relation_name + "tracing-provider") + self._charm = charm + self._external_url = external_url + self._relation_name = relation_name + self.framework.observe( + self._charm.on[relation_name].relation_joined, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_created, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_changed, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) + + def _on_relation_event(self, e: RelationEvent): + """Handle relation created/joined/changed events.""" + if self.is_requirer_ready(e.relation): + self.on.request.emit(e.relation) + + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" + try: + self._get_requested_protocols(relation) + except NotReadyError: + return False + return True + + @staticmethod + def _get_requested_protocols(relation: Relation): + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + try: + databag = TracingRequirerAppData.load(relation.data[app]) + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"relation {relation} is not ready to talk tracing") + raise NotReadyError() + return databag.receivers + + def requested_protocols(self): + """All receiver protocols that have been requested by our related apps.""" + requested_protocols = set() + for relation in self.relations: + try: + protocols = self._get_requested_protocols(relation) + except NotReadyError: + continue + requested_protocols.update(protocols) + return requested_protocols + + @property + def relations(self) -> List[Relation]: + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] + + def publish_receivers(self, receivers: Sequence[RawReceiver]): + """Let all requirers know that these receivers are active and listening.""" + if not self._charm.unit.is_leader(): + raise RuntimeError("only leader can do this") + + for relation in self.relations: + try: + TracingProviderAppData( + receivers=[ + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers + ], + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to update_relation_data." + f"The relation must be gone." 
+ ) + continue + raise + + +class EndpointRemovedEvent(RelationBrokenEvent): + """Event representing a change in one of the receiver endpoints.""" + + +class EndpointChangedEvent(_AutoSnapshotEvent): + """Event representing a change in one of the receiver endpoints.""" + + __args__ = ("_receivers",) + + if TYPE_CHECKING: + _receivers = [] # type: List[dict] + + @property + def receivers(self) -> List[Receiver]: + """Cast receivers back from dict.""" + return [Receiver(**i) for i in self._receivers] + + +class TracingEndpointRequirerEvents(CharmEvents): + """TracingEndpointRequirer events.""" + + endpoint_changed = EventSource(EndpointChangedEvent) + endpoint_removed = EventSource(EndpointRemovedEvent) + + +class TracingEndpointRequirer(Object): + """A tracing endpoint for Tempo.""" + + on = TracingEndpointRequirerEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + protocols: Optional[List[ReceiverProtocol]] = None, + ): + """Construct a tracing requirer for a Tempo charm. + + If your application supports pushing traces to a distributed tracing backend, the + `TracingEndpointRequirer` object enables your charm to easily access endpoint information + exchanged over a `tracing` relation interface. + + Args: + charm: a `CharmBase` object that manages this + `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating + class. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". It is strongly + advised not to change the default, so that people deploying your charm will have a + consistent experience with all other charms that provide tracing endpoints. + protocols: optional list of protocols that the charm intends to send traces with. + The provider will enable receivers for these and only these protocols, + so be sure to enable all protocols the charm or its workload are going to need. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.provides` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires + ) + + super().__init__(charm, relation_name) + + self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[self._relation_name] + self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) + self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) + + if protocols: + self.request_protocols(protocols) + + def request_protocols( + self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None + ): + """Publish the list of protocols which the provider should activate.""" + # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? + relations = [relation] if relation else self.relations + + if not protocols: + # empty sequence + raise ValueError( + "You need to pass a nonempty sequence of protocols to `request_protocols`." 
+ ) + + try: + if self._charm.unit.is_leader(): + for relation in relations: + TracingRequirerAppData( + receivers=list(protocols), + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to request_protocols." + f"The relation must be gone." + ) + return + raise + + @property + def relations(self) -> List[Relation]: + """The tracing relations associated with this endpoint.""" + return self._charm.model.relations[self._relation_name] + + @property + def _relation(self) -> Optional[Relation]: + """If this wraps a single endpoint, the relation bound to it, if any.""" + if not self._is_single_endpoint: + objname = type(self).__name__ + raise AmbiguousRelationUsageError( + f"This {objname} wraps a {self._relation_name} endpoint that has " + "limit != 1. We can't determine what relation, of the possibly many, you are " + f"talking about. Please pass a relation instance while calling {objname}, " + "or set limit=1 in the charm metadata." + ) + relations = self.relations + return relations[0] if relations else None + + def is_ready(self, relation: Optional[Relation] = None): + """Is this endpoint ready?""" + relation = relation or self._relation + if not relation: + logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") + return False + if relation.data is None: + logger.error(f"relation data is None for {relation}") + return False + if not relation.app: + logger.error(f"{relation} event received but there is no relation.app") + return False + try: + databag = dict(relation.data[relation.app]) + TracingProviderAppData.load(databag) + + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"failed validating relation data for {relation}") + return False + return True + + def _on_tracing_relation_changed(self, event): + """Notify the providers that there is new endpoint information available.""" + relation = event.relation + if not self.is_ready(relation): + self.on.endpoint_removed.emit(relation) # type: ignore + return + + data = TracingProviderAppData.load(relation.data[relation.app]) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore + + def _on_tracing_relation_broken(self, event: RelationBrokenEvent): + """Notify the providers that the endpoint is broken.""" + relation = event.relation + self.on.endpoint_removed.emit(relation) # type: ignore + + def get_all_endpoints( + self, relation: Optional[Relation] = None + ) -> Optional[TracingProviderAppData]: + """Unmarshalled relation data.""" + relation = relation or self._relation + if not self.is_ready(relation): + return + return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore + + def _get_endpoint( + self, relation: Optional[Relation], protocol: ReceiverProtocol + ) -> Optional[str]: + app_data = self.get_all_endpoints(relation) + if not app_data: + return None + receivers: List[Receiver] = list( + filter(lambda i: i.protocol.name == protocol, app_data.receivers) + ) + if not receivers: + logger.error(f"no receiver found with protocol={protocol!r}") + return + if len(receivers) > 1: + logger.error( + f"too many receivers with protocol={protocol!r}; using first one. 
Found: {receivers}" + ) + return + + receiver = receivers[0] + return receiver.url + + def get_endpoint( + self, protocol: ReceiverProtocol, relation: Optional[Relation] = None + ) -> Optional[str]: + """Receiver endpoint for the given protocol. + + It could happen that this function gets called before the provider publishes the endpoints. + In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to + restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. + + Raises: + ProtocolNotRequestedError: + If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. + """ + endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) + if not endpoint: + requested_protocols = set() + relations = [relation] if relation else self.relations + for relation in relations: + try: + databag = TracingRequirerAppData.load(relation.data[self._charm.app]) + except DataValidationError: + continue + + requested_protocols.update(databag.receivers) + + if protocol not in requested_protocols: + raise ProtocolNotRequestedError(protocol, relation) + + return None + return endpoint + + +def charm_tracing_config( + endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] +) -> Tuple[Optional[str], Optional[str]]: + """Return the charm_tracing config you likely want. + + If no endpoint is provided: + disable charm tracing. + If https endpoint is provided but cert_path is not found on disk: + disable charm tracing. + If https endpoint is provided and cert_path is None: + ERROR + Else: + proceed with charm tracing (with or without tls, as appropriate) + + Usage: + If you are using charm_tracing >= v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self.my_endpoint, self.cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + + If you are using charm_tracing < v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self._my_endpoint, self._cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + >>> @property + >>> def my_endpoint(self): + >>> return self._my_endpoint + >>> @property + >>> def cert_path(self): + >>> return self._cert_path + + """ + if not endpoint_requirer.is_ready(): + return None, None + + endpoint = endpoint_requirer.get_endpoint("otlp_http") + if not endpoint: + return None, None + + is_https = endpoint.startswith("https://") + + if is_https: + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. 
" + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " + ) + return None, None + return endpoint, str(cert_path) + else: + return endpoint, None diff --git a/examples/flask/requirements.txt b/examples/flask/requirements.txt index e327757..e87af10 100644 --- a/examples/flask/requirements.txt +++ b/examples/flask/requirements.txt @@ -1,7 +1,5 @@ cosl -jsonschema >=4.19,<4.20 +jsonschema ops >= 2.6 # pydantic==2.6.4 https://github.com/canonical/paas-charm/archive/tempo-tracing.tar.gz -gunicorn -opentelemetry-distro diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index 61235f1..2794ead 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -8,7 +8,7 @@ from cosl import JujuTopology from ops.pebble import ExecError, ExecProcess -from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig, WorkerClassEnum +from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig from paas_charm._gunicorn.workload_config import create_workload_config from paas_charm._gunicorn.wsgi_app import WsgiApp from paas_charm.app import App, WorkloadConfig From 4da5103cb971194be225d25fe8539f37457da6a7 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 16 Dec 2024 11:03:28 +0300 Subject: [PATCH 04/70] chore(): Fix wrong function --- src/paas_charm/_gunicorn/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index 2794ead..a712ca0 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -37,7 +37,7 @@ def _create_app(self, topology: JujuTopology) -> App: charm_state = self._create_charm_state() webserver = GunicornWebserver( - webserver_config=self.create_webserver_config(), + webserver_config=WebserverConfig.from_charm_config(dict(self.config)), workload_config=self._workload_config, container=self.unit.get_container(self._workload_config.container_name), ) From 311854fdc764179e1e1bca8f276d2fcf4d2d7509 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Tue, 17 Dec 2024 09:14:23 +0300 Subject: [PATCH 05/70] feat(tracing): Remove unused topology, bring order to code --- examples/flask/charmcraft.yaml | 6 +-- examples/flask/src/charm.py | 17 -------- src/paas_charm/_gunicorn/charm.py | 4 +- src/paas_charm/_gunicorn/webserver.py | 32 ++++----------- src/paas_charm/_gunicorn/wsgi_app.py | 3 -- src/paas_charm/app.py | 15 ++++--- src/paas_charm/charm.py | 57 ++++++++++++++++++--------- src/paas_charm/charm_state.py | 10 ++++- 8 files changed, 64 insertions(+), 80 deletions(-) diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index c73a3fe..83b86b2 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -130,11 +130,7 @@ requires: interface: rabbitmq optional: True limit: 1 - charm-tracing: - interface: tracing - limit: 1 - optional: true - workload-tracing: + tracing: interface: tracing limit: 1 optional: true diff --git a/examples/flask/src/charm.py b/examples/flask/src/charm.py index 5a6e1c7..1b5c207 100755 --- a/examples/flask/src/charm.py +++ b/examples/flask/src/charm.py @@ -8,14 +8,11 @@ import typing import ops -from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm -from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer, 
charm_tracing_config import paas_charm.flask logger = logging.getLogger(__name__) -@trace_charm(tracing_endpoint="charm_tracing_endpoint") class FlaskCharm(paas_charm.flask.Charm): """Flask Charm service.""" @@ -26,20 +23,6 @@ def __init__(self, *args: typing.Any) -> None: args: passthrough to CharmBase. """ super().__init__(*args) - self.charm_tracing = TracingEndpointRequirer(self, relation_name="charm-tracing", protocols=["otlp_http"]) - self.workload_tracing = TracingEndpointRequirer(self, relation_name="workload-tracing", protocols=["otlp_grpc"]) - if self.charm_tracing.is_ready(): - logger.info("```````````````: %s", self.charm_tracing.get_endpoint("otlp_http")) - if self.workload_tracing.is_ready(): - logger.info("```````````````: %s", self.workload_tracing.get_endpoint("otlp_grpc")) - # self.charm_tracing_endpoint, _ = charm_tracing_config(self.charm_tracing,None) - - @property - def charm_tracing_endpoint(self) -> str | None: - """Tempo endpoint for workload tracing""" - if self.charm_tracing.is_ready(): - return self.charm_tracing.get_endpoint("otlp_http") - return None if __name__ == "__main__": # pragma: nocover diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index a712ca0..faa3931 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -5,7 +5,6 @@ import logging -from cosl import JujuTopology from ops.pebble import ExecError, ExecProcess from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig @@ -28,7 +27,7 @@ def _workload_config(self) -> WorkloadConfig: framework_name=self._framework_name, unit_name=self.unit.name ) - def _create_app(self, topology: JujuTopology) -> App: + def _create_app(self) -> App: """Build an App instance for the Gunicorn based charm. 
Returns: @@ -48,5 +47,4 @@ def _create_app(self, topology: JujuTopology) -> App: workload_config=self._workload_config, webserver=webserver, database_migration=self._database_migration, - juju_topology=topology, ) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 9dbcf9c..46ea56e 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -130,53 +130,37 @@ def _config(self) -> str: error_log = repr( APPLICATION_ERROR_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) + tracing_link = "http://grafana-agent-k8s-0.grafana-agent-k8s-endpoints.flask-model.svc.cluster.local:4318/v1/traces" + tracing_service_name = "flask-k8s-charm" config = textwrap.dedent( f"""\ from opentelemetry import trace - #from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider - from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter + from opentelemetry.sdk.trace.export import BatchSpanProcessor bind = ['0.0.0.0:{self._workload_config.port}'] chdir = {repr(str(self._workload_config.app_dir))} accesslog = {access_log} errorlog = {error_log} statsd_host = {repr(STATSD_HOST)} - """ - ) - config_2 = textwrap.dedent( - """\ def post_fork(server, worker): resource = Resource.create( - attributes={ - "service.name": "flask-k8s-app", - "compose_service": "flask-k8s-charm", - "charm_type": "FlaskCharm", + attributes={{ + "service.name": "{tracing_service_name}", "worker": worker.pid, - "juju_application":"flask-k8s", - "juju_model":"flask-model", - "juju_model_uuid":"c66de66b-d52b-4d7b-8efe-d5b14cbd54a3", - "juju_unit":"flask-k8s/0", - "juju_charm":"flask-k8s", - } + }} ) - trace.set_tracer_provider(TracerProvider(resource=resource)) - # This uses insecure connection for the purpose of example. Please see the - # OTLP Exporter documentation for other options. span_processor = BatchSpanProcessor( - OTLPSpanExporter(endpoint="http://grafana-agent-k8s-0.grafana-agent-k8s-endpoints.flask-model.svc.cluster.local:4318/v1/traces", - # OTLPSpanExporter(endpoint="grafana-agent-k8s-0.grafana-agent-k8s-endpoints.flask-model.svc.cluster.local:4317", - # insecure=True, + OTLPSpanExporter( + endpoint="{tracing_link}" ) ) trace.get_tracer_provider().add_span_processor(span_processor) - """ ) - config += config_2 config += "\n".join(config_entries) return config diff --git a/src/paas_charm/_gunicorn/wsgi_app.py b/src/paas_charm/_gunicorn/wsgi_app.py index 94cc83a..e5a5a20 100644 --- a/src/paas_charm/_gunicorn/wsgi_app.py +++ b/src/paas_charm/_gunicorn/wsgi_app.py @@ -6,7 +6,6 @@ import logging import ops -from cosl import JujuTopology from paas_charm._gunicorn.webserver import GunicornWebserver from paas_charm.app import App, WorkloadConfig @@ -27,7 +26,6 @@ def __init__( # pylint: disable=too-many-arguments workload_config: WorkloadConfig, database_migration: DatabaseMigration, webserver: GunicornWebserver, - juju_topology: JujuTopology, ): """Construct the WsgiApp instance. 
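        As an aside on the Gunicorn config generated above: the exporter wiring that its
        ``post_fork`` hook performs can be sketched as a standalone snippet, useful for
        sanity-checking an OTLP/HTTP endpoint. The endpoint, service name and span name
        below are placeholders; the assumed packages are opentelemetry-sdk and
        opentelemetry-exporter-otlp-proto-http.

            from opentelemetry import trace
            from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
            from opentelemetry.sdk.resources import Resource
            from opentelemetry.sdk.trace import TracerProvider
            from opentelemetry.sdk.trace.export import BatchSpanProcessor

            provider = TracerProvider(resource=Resource.create({"service.name": "smoke-test"}))
            provider.add_span_processor(
                BatchSpanProcessor(OTLPSpanExporter(endpoint="http://tempo.example.local:4318/v1/traces"))
            )
            trace.set_tracer_provider(provider)
            with trace.get_tracer(__name__).start_as_current_span("smoke-test-span"):
                pass  # the batch processor flushes when the provider shuts down (at interpreter exit by default)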
@@ -45,7 +43,6 @@ def __init__( # pylint: disable=too-many-arguments database_migration=database_migration, configuration_prefix=f"{workload_config.framework.upper()}_", framework_config_prefix=f"{workload_config.framework.upper()}_", - juju_topology=juju_topology ) self._webserver = webserver diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index 9ab98f3..b41a5e8 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -11,7 +11,6 @@ from typing import List import ops -from cosl import JujuTopology from paas_charm.charm_state import CharmState, IntegrationsState from paas_charm.database_migration import DatabaseMigration @@ -81,7 +80,6 @@ def __init__( # pylint: disable=too-many-arguments framework_config_prefix: str = "APP_", configuration_prefix: str = "APP_", integrations_prefix: str = "", - juju_topology: JujuTopology, ): """Construct the App instance. @@ -101,7 +99,6 @@ def __init__( # pylint: disable=too-many-arguments self.framework_config_prefix = framework_config_prefix self.configuration_prefix = configuration_prefix self.integrations_prefix = integrations_prefix - self._juju_topology = juju_topology def stop_all_services(self) -> None: """Stop all the services in the workload. @@ -155,11 +152,6 @@ def gen_environment(self) -> dict[str, str]: for k, v in framework_config.items() } ) - if self._juju_topology: - env["OTEL_RESOURCE_ATTRIBUTES"] = ( - f"juju_application={self._juju_topology.application},juju_model={self._juju_topology.model},juju_model_uuid={self._juju_topology.model_uuid},juju_unit={self._juju_topology.unit},juju_charm={self._juju_topology.charm_name}" - ) - env["OTEL_EXPORTER_OTLP_PROTOCOL"] = "grpc" if self._charm_state.base_url: env[f"{prefix}BASE_URL"] = self._charm_state.base_url @@ -267,6 +259,13 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") - for interface_name, uri in integrations.databases_uris.items(): interface_envvars = _db_url_to_env_variables(interface_name.upper(), uri) env.update(interface_envvars) + if integrations.tracing_uri: + tracing_envvars = _url_env_vars("TRACING", integrations.tracing_uri) + logger.info(f"GOT IT Tracing URI: {integrations.tracing_uri}") + logger.info(f"Tracing envvars: {tracing_envvars}") + env.update(tracing_envvars) + else: + logger.info(f"NO DICE Tracing URI: {integrations.tracing_uri}") if integrations.s3_parameters: s3 = integrations.s3_parameters diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 6f26dba..42561a8 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -10,9 +10,7 @@ from charms.data_platform_libs.v0.data_interfaces import DatabaseRequiresEvent from charms.redis_k8s.v0.redis import RedisRelationCharmEvents, RedisRequires -# from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer -from cosl import JujuTopology from ops.model import Container from pydantic import BaseModel, ValidationError @@ -47,6 +45,13 @@ "Missing charm library, please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`" ) +try: + # pylint: disable=ungrouped-imports + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer +except ImportError: + logger.exception( + "Missing charm library, please run `charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" + ) class PaasCharm(abc.ABC, ops.CharmBase): # pylint: disable=too-many-instance-attributes """PaasCharm base charm service mixin. 
@@ -68,7 +73,7 @@ def _workload_config(self) -> WorkloadConfig: """Return an WorkloadConfig instance.""" @abc.abstractmethod - def _create_app(self, juju_topology: JujuTopology) -> App: + def _create_app(self) -> App: """Create an App instance.""" on = RedisRelationCharmEvents() @@ -85,13 +90,6 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._secret_storage = KeySecretStorage(charm=self, key=f"{framework_name}_secret_key") self._database_requirers = make_database_requirers(self, self.app.name) - # self.tracing = TracingEndpointRequirer(self, - # protocols=['otlp_grpc', 'otlp_http'] - # ) - # if self.tracing.is_ready(): - # logger.info("=================== tracing[otlp_grpc]: %s", self.tracing.get_endpoint('otlp_grpc')) - # logger.info("=================== tracing[otlp_http]: %s", self.tracing.get_endpoint('otlp_http')) - requires = self.framework.meta.requires ######************* if "redis" in requires and requires["redis"].interface_name == "redis": self._redis = RedisRequires(charm=self, relation_name="redis") @@ -126,6 +124,17 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: else: self._rabbitmq = None + if "tracing" in requires and requires["tracing"].interface_name == "tracing": + logger.error("REQUESTING TRACING((((((((((((((((((((((((((((((()))))))))))))))))))))))))))))))") + self._tracing = TracingEndpointRequirer(self, relation_name="tracing", protocols=["otlp_http"]) + self.framework.observe(self._tracing.on.endpoint_changed, self._on_tracing_relation_changed) + self.framework.observe(self._tracing.on.endpoint_removed, self._on_tracing_relation_broken) + self.framework.observe(self.on[self._tracing._relation_name].relation_joined, self._on_rel_on_tracing_relation_changedation_changed) + self.framework.observe(self.on[self._tracing._relation_name].relation_changed, self._on_tracing_relation_changed) + self.framework.observe(self.on[self._tracing._relation_name].relation_broken, self._on_tracing_relation_broken) + if self._tracing.is_ready(): + logger.info("TRACING ENDPOINT: %s", self._tracing.get_endpoint("otlp_http")) + self._database_migration = DatabaseMigration( container=self.unit.get_container(self._workload_config.container_name), state_dir=self._workload_config.state_dir, @@ -172,8 +181,6 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self.framework.observe( self.on[self._workload_config.container_name].pebble_ready, self._on_pebble_ready ) - self._topology = JujuTopology.from_charm(self) - logger.info("-----------: %s", str(self._topology)) def get_framework_config(self) -> BaseModel: """Return the framework related configurations. 
@@ -208,9 +215,6 @@ def _container(self) -> Container: @block_if_invalid_config def _on_config_changed(self, _: ops.EventBase) -> None: """Configure the application pebble service layer.""" - # if self.tracing.is_ready(): - # logger.info("=================== tracing[otlp_grpc]: %s", self.tracing.get_endpoint('otlp_grpc')) - # logger.info("=================== tracing[otlp_http]: %s", self.tracing.get_endpoint('otlp_http')) self.restart() @block_if_invalid_config @@ -271,7 +275,7 @@ def is_ready(self) -> bool: missing_integrations = self._missing_required_integrations(charm_state) if missing_integrations: - self._create_app(self._topology).stop_all_services() + self._create_app().stop_all_services() self._database_migration.set_status_to_pending() message = f"missing integrations: {', '.join(missing_integrations)}" logger.info(message) @@ -311,6 +315,9 @@ def _missing_required_integrations(self, charm_state: CharmState) -> list[str]: if self._rabbitmq and not charm_state.integrations.rabbitmq_uri: if not requires["rabbitmq"].optional: missing_integrations.append("rabbitmq") + if self._tracing and not charm_state.integrations.tracing_uri: + if not requires["tracing"].optional: + missing_integrations.append("tracing") return missing_integrations def restart(self, rerun_migrations: bool = False) -> None: @@ -326,7 +333,7 @@ def restart(self, rerun_migrations: bool = False) -> None: return try: self.update_app_and_unit_status(ops.MaintenanceStatus("Preparing service for restart")) - self._create_app(self._topology).restart() + self._create_app().restart() except CharmConfigInvalidError as exc: self.update_app_and_unit_status(ops.BlockedStatus(exc.msg)) return @@ -342,8 +349,7 @@ def _gen_environment(self) -> dict[str, str]: Returns: A dictionary representing the application environment variables. """ - logger.info("-----------: %s", str(self._topology)) - env = self._create_app(self._topology).gen_environment() + env = self._create_app().gen_environment() return env @@ -376,6 +382,7 @@ def _create_charm_state(self) -> CharmState: s3_connection_info=self._s3.get_s3_connection_info() if self._s3 else None, saml_relation_data=saml_relation_data, rabbitmq_uri=self._rabbitmq.rabbitmq_uri() if self._rabbitmq else None, + tracing_uri=self._tracing.get_endpoint(protocol="otlp_http") if self._tracing.is_ready() else None, base_url=self._base_url, ) @@ -490,3 +497,15 @@ def _on_rabbitmq_ready(self, _: ops.HookEvent) -> None: def _on_rabbitmq_departed(self, _: ops.HookEvent) -> None: """Handle rabbitmq departed event.""" self.restart() + + @block_if_invalid_config + def _on_tracing_relation_changed(self, _: ops.HookEvent) -> None: + """Handle tracing relation changed event.""" + logger.error("Tracing relation changed") + self.restart + + @block_if_invalid_config + def _on_tracing_relation_broken(self, _: ops.HookEvent) -> None: + """Handle tracing relation broken event.""" + logger.error("Tracing relation broken") + self.restart diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 98b3247..e07b4bc 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -89,6 +89,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None = None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, + tracing_uri: str | None = None, base_url: str | None = None, ) -> "CharmState": """Initialize a new instance of the CharmState class from the associated charm. 
@@ -103,6 +104,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: Connection info from S3 lib. saml_relation_data: Relation data from the SAML app. rabbitmq_uri: RabbitMQ uri. + tracing_uri: The tracing uri provided by the Tempo coordinator charm. base_url: Base URL for the service. Return: @@ -123,6 +125,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info=s3_connection_info, saml_relation_data=saml_relation_data, rabbitmq_uri=rabbitmq_uri, + tracing_uri=tracing_uri, ) return cls( framework=framework, @@ -209,6 +212,7 @@ class IntegrationsState: s3_parameters: S3 parameters. saml_parameters: SAML parameters. rabbitmq_uri: RabbitMQ uri. + tracing_uri: The tracing uri provided by the Tempo coordinator charm. """ redis_uri: str | None = None @@ -216,6 +220,7 @@ class IntegrationsState: s3_parameters: "S3Parameters | None" = None saml_parameters: "SamlParameters | None" = None rabbitmq_uri: str | None = None + tracing_uri: str | None = None # This dataclass combines all the integrations, so it is reasonable that they stay together. @classmethod @@ -227,6 +232,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, + tracing_uri: str | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -238,6 +244,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: S3 connection info from S3 lib. saml_relation_data: Saml relation data from saml lib. rabbitmq_uri: RabbitMQ uri. + tracing_uri: The tracing uri provided by the Tempo coordinator charm. Return: The IntegrationsState instance created. @@ -275,7 +282,7 @@ def build( # pylint: disable=too-many-arguments # as None while the integration is being created. 
if redis_uri is not None and re.fullmatch(r"redis://[^:/]+:None", redis_uri): redis_uri = None - + logger.warning(f"TRACING URI: {tracing_uri}") return cls( redis_uri=redis_uri, databases_uris={ @@ -286,6 +293,7 @@ def build( # pylint: disable=too-many-arguments s3_parameters=s3_parameters, saml_parameters=saml_parameters, rabbitmq_uri=rabbitmq_uri, + tracing_uri=tracing_uri, ) From 8ba4b39af101f5f6b9b085b0000e6203d044cdaa Mon Sep 17 00:00:00 2001 From: ali ugur Date: Tue, 17 Dec 2024 11:50:27 +0300 Subject: [PATCH 06/70] feat(tracing): Implemented environmental approach --- src/paas_charm/_gunicorn/webserver.py | 10 +++++++--- src/paas_charm/app.py | 9 ++------- src/paas_charm/charm.py | 17 ++++++++--------- src/paas_charm/charm_state.py | 20 +++++++++++--------- 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 46ea56e..72429e4 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -130,8 +130,12 @@ def _config(self) -> str: error_log = repr( APPLICATION_ERROR_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) - tracing_link = "http://grafana-agent-k8s-0.grafana-agent-k8s-endpoints.flask-model.svc.cluster.local:4318/v1/traces" - tracing_service_name = "flask-k8s-charm" + framework_environments = self._container.get_plan().to_dict()['services'][self._workload_config.framework]['environment'] + tracing_uri = None + tracing_service_name = None + if framework_environments.get('TRACING_URI', None): + tracing_uri = framework_environments['TRACING_URI'] + tracing_service_name = framework_environments['TRACING_SERVICE_NAME'] config = textwrap.dedent( f"""\ from opentelemetry import trace @@ -155,7 +159,7 @@ def post_fork(server, worker): trace.set_tracer_provider(TracerProvider(resource=resource)) span_processor = BatchSpanProcessor( OTLPSpanExporter( - endpoint="{tracing_link}" + endpoint="{tracing_uri}/v1/traces" ) ) trace.get_tracer_provider().add_span_processor(span_processor) diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index b41a5e8..79990b9 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -259,13 +259,8 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") - for interface_name, uri in integrations.databases_uris.items(): interface_envvars = _db_url_to_env_variables(interface_name.upper(), uri) env.update(interface_envvars) - if integrations.tracing_uri: - tracing_envvars = _url_env_vars("TRACING", integrations.tracing_uri) - logger.info(f"GOT IT Tracing URI: {integrations.tracing_uri}") - logger.info(f"Tracing envvars: {tracing_envvars}") - env.update(tracing_envvars) - else: - logger.info(f"NO DICE Tracing URI: {integrations.tracing_uri}") + if integrations.tracing_relation_data: + env.update(integrations.tracing_relation_data) if integrations.s3_parameters: s3 = integrations.s3_parameters diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 42561a8..4d2e5b4 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -129,11 +129,6 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._tracing = TracingEndpointRequirer(self, relation_name="tracing", protocols=["otlp_http"]) self.framework.observe(self._tracing.on.endpoint_changed, self._on_tracing_relation_changed) self.framework.observe(self._tracing.on.endpoint_removed, self._on_tracing_relation_broken) - 
self.framework.observe(self.on[self._tracing._relation_name].relation_joined, self._on_rel_on_tracing_relation_changedation_changed) - self.framework.observe(self.on[self._tracing._relation_name].relation_changed, self._on_tracing_relation_changed) - self.framework.observe(self.on[self._tracing._relation_name].relation_broken, self._on_tracing_relation_broken) - if self._tracing.is_ready(): - logger.info("TRACING ENDPOINT: %s", self._tracing.get_endpoint("otlp_http")) self._database_migration = DatabaseMigration( container=self.unit.get_container(self._workload_config.container_name), @@ -315,7 +310,7 @@ def _missing_required_integrations(self, charm_state: CharmState) -> list[str]: if self._rabbitmq and not charm_state.integrations.rabbitmq_uri: if not requires["rabbitmq"].optional: missing_integrations.append("rabbitmq") - if self._tracing and not charm_state.integrations.tracing_uri: + if self._tracing and not charm_state.integrations.tracing_relation_data: if not requires["tracing"].optional: missing_integrations.append("tracing") return missing_integrations @@ -372,6 +367,10 @@ def _create_charm_state(self) -> CharmState: for k, v in charm_config.items() }, ) + tracing_relation_data = None + if self._tracing and self._tracing.is_ready(): + tracing_relation_data = {"TRACING_URI": self._tracing.get_endpoint(protocol="otlp_http"), + "TRACING_SERVICE_NAME": f"{self.framework.meta.name}-charm"} return CharmState.from_charm( config=config, framework=self._framework_name, @@ -382,7 +381,7 @@ def _create_charm_state(self) -> CharmState: s3_connection_info=self._s3.get_s3_connection_info() if self._s3 else None, saml_relation_data=saml_relation_data, rabbitmq_uri=self._rabbitmq.rabbitmq_uri() if self._rabbitmq else None, - tracing_uri=self._tracing.get_endpoint(protocol="otlp_http") if self._tracing.is_ready() else None, + tracing_relation_data=tracing_relation_data, base_url=self._base_url, ) @@ -502,10 +501,10 @@ def _on_rabbitmq_departed(self, _: ops.HookEvent) -> None: def _on_tracing_relation_changed(self, _: ops.HookEvent) -> None: """Handle tracing relation changed event.""" logger.error("Tracing relation changed") - self.restart + self.restart() @block_if_invalid_config def _on_tracing_relation_broken(self, _: ops.HookEvent) -> None: """Handle tracing relation broken event.""" logger.error("Tracing relation broken") - self.restart + self.restart() diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index e07b4bc..607fcf8 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -89,7 +89,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None = None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tracing_uri: str | None = None, + tracing_relation_data: typing.MutableMapping[str, str] | None = None, base_url: str | None = None, ) -> "CharmState": """Initialize a new instance of the CharmState class from the associated charm. @@ -104,7 +104,8 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: Connection info from S3 lib. saml_relation_data: Relation data from the SAML app. rabbitmq_uri: RabbitMQ uri. - tracing_uri: The tracing uri provided by the Tempo coordinator charm. + tracing_relation_data: The tracing uri provided by the Tempo coordinator charm + and charm name. base_url: Base URL for the service. 
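                Concretely, the mapping passed here is built by the charm layer as in the
                sketch below (the endpoint is a placeholder; the service name is derived from
                the charm's metadata name, so a charm named flask-k8s yields "flask-k8s-charm"):

                    tracing_relation_data = {
                        "TRACING_URI": "http://tempo.example.local:4318",  # otlp_http receiver endpoint
                        "TRACING_SERVICE_NAME": "flask-k8s-charm",         # f"{metadata name}-charm"
                    }

                These keys are merged into the workload environment by ``map_integrations_to_env``
                and read back from the Pebble plan when the Gunicorn config is rendered.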
Return: @@ -125,7 +126,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info=s3_connection_info, saml_relation_data=saml_relation_data, rabbitmq_uri=rabbitmq_uri, - tracing_uri=tracing_uri, + tracing_relation_data=tracing_relation_data, ) return cls( framework=framework, @@ -212,7 +213,8 @@ class IntegrationsState: s3_parameters: S3 parameters. saml_parameters: SAML parameters. rabbitmq_uri: RabbitMQ uri. - tracing_uri: The tracing uri provided by the Tempo coordinator charm. + tracing_relation_data: The tracing uri provided by the Tempo coordinator charm + and charm name. """ redis_uri: str | None = None @@ -220,7 +222,7 @@ class IntegrationsState: s3_parameters: "S3Parameters | None" = None saml_parameters: "SamlParameters | None" = None rabbitmq_uri: str | None = None - tracing_uri: str | None = None + tracing_relation_data: dict[str, str] = field(default_factory=dict) # This dataclass combines all the integrations, so it is reasonable that they stay together. @classmethod @@ -232,7 +234,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tracing_uri: str | None = None, + tracing_relation_data: typing.MutableMapping[str, str] | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -244,7 +246,8 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: S3 connection info from S3 lib. saml_relation_data: Saml relation data from saml lib. rabbitmq_uri: RabbitMQ uri. - tracing_uri: The tracing uri provided by the Tempo coordinator charm. + tracing_relation_data: The tracing uri provided by the Tempo coordinator charm + and charm name. Return: The IntegrationsState instance created. @@ -282,7 +285,6 @@ def build( # pylint: disable=too-many-arguments # as None while the integration is being created. 
if redis_uri is not None and re.fullmatch(r"redis://[^:/]+:None", redis_uri): redis_uri = None - logger.warning(f"TRACING URI: {tracing_uri}") return cls( redis_uri=redis_uri, databases_uris={ @@ -293,7 +295,7 @@ def build( # pylint: disable=too-many-arguments s3_parameters=s3_parameters, saml_parameters=saml_parameters, rabbitmq_uri=rabbitmq_uri, - tracing_uri=tracing_uri, + tracing_relation_data=tracing_relation_data, ) From 5df179c3bd958e1d03c8960a7536fc0b03f9a8f6 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Tue, 17 Dec 2024 12:40:58 +0300 Subject: [PATCH 07/70] chore(): Remove unnecessary log --- src/paas_charm/charm.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 4d2e5b4..8f96db8 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -125,7 +125,6 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._rabbitmq = None if "tracing" in requires and requires["tracing"].interface_name == "tracing": - logger.error("REQUESTING TRACING((((((((((((((((((((((((((((((()))))))))))))))))))))))))))))))") self._tracing = TracingEndpointRequirer(self, relation_name="tracing", protocols=["otlp_http"]) self.framework.observe(self._tracing.on.endpoint_changed, self._on_tracing_relation_changed) self.framework.observe(self._tracing.on.endpoint_removed, self._on_tracing_relation_broken) @@ -500,11 +499,9 @@ def _on_rabbitmq_departed(self, _: ops.HookEvent) -> None: @block_if_invalid_config def _on_tracing_relation_changed(self, _: ops.HookEvent) -> None: """Handle tracing relation changed event.""" - logger.error("Tracing relation changed") self.restart() @block_if_invalid_config def _on_tracing_relation_broken(self, _: ops.HookEvent) -> None: """Handle tracing relation broken event.""" - logger.error("Tracing relation broken") self.restart() From ae59283a76a9dadd74f6b1bafa6b2613ada842da Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 18 Dec 2024 10:38:16 +0300 Subject: [PATCH 08/70] chore(): Write interface --- examples/django/charm/charmcraft.yaml | 10 +- examples/fastapi/charm/charmcraft.yaml | 10 +- .../tempo_coordinator_k8s/v0/tracing.py | 998 ++++++++++++++++++ .../tempo_coordinator_k8s/v0/charm_tracing.py | 23 +- .../tempo_coordinator_k8s/v0/tracing.py | 8 +- examples/flask/src/charm.py | 1 + examples/flask/test_rock/app.py | 6 +- examples/flask/test_rock/requirements.txt | 1 - src/paas_charm/__init__.py | 7 + src/paas_charm/_gunicorn/charm.py | 3 - src/paas_charm/_gunicorn/webserver.py | 12 +- src/paas_charm/app.py | 9 +- src/paas_charm/charm.py | 47 +- src/paas_charm/charm_state.py | 18 +- 14 files changed, 1080 insertions(+), 73 deletions(-) create mode 100644 examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py diff --git a/examples/django/charm/charmcraft.yaml b/examples/django/charm/charmcraft.yaml index 05bf5df..cf7f0e2 100644 --- a/examples/django/charm/charmcraft.yaml +++ b/examples/django/charm/charmcraft.yaml @@ -50,9 +50,9 @@ config: type: string django-secret-key-id: description: >- - This configuration is similar to `django-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual Django secret key. - To create the secret, run the following command: + This configuration is similar to `django-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual Django secret key. 
+ To create the secret, run the following command: `juju add-secret my-django-secret-key value= && juju grant-secret my-django-secret-key django-k8s`, and use the outputted secret ID to configure this option. type: secret @@ -90,6 +90,10 @@ requires: interface: postgresql_client optional: False limit: 1 + tracing: + interface: tracing + optional: True + limit: 1 resources: django-app-image: description: django application image. diff --git a/examples/fastapi/charm/charmcraft.yaml b/examples/fastapi/charm/charmcraft.yaml index fc19128..fbfc34d 100644 --- a/examples/fastapi/charm/charmcraft.yaml +++ b/examples/fastapi/charm/charmcraft.yaml @@ -50,9 +50,9 @@ config: app-secret-key-id: type: secret description: >- - This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual secret key. - To create the secret, run the following command: + This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual secret key. + To create the secret, run the following command: `juju add-secret my-secret-key value= && juju grant-secret my-secret-key fastapi-k8s`, and use the outputted secret ID to configure this option. user-defined-config: @@ -79,6 +79,10 @@ requires: interface: postgresql_client optional: True limit: 1 + tracing: + interface: tracing + optional: True + limit: 1 resources: app-image: description: FastAPI application image. diff --git a/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py new file mode 100644 index 0000000..4516af6 --- /dev/null +++ b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -0,0 +1,998 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +"""## Overview. + +This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a +tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm +may maintain the same interface and be backward compatible with all currently integrated charms. + +## Requirer Library Usage + +Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` +object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` +object only requires instantiating it, typically in the constructor of your charm. The +`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint + is exposed by the Tempo charm, and a list of protocols it intends to send traces with. + This relation must use the `tracing` interface. + The `TracingEndpointRequirer` object may be instantiated as follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointRequirer(self, + protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] + ) + # ... + +Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the +parent charm. + +Alternatively to providing the list of requested protocols at init time, the charm can do it at +any point in time by calling the +`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. 
+Using this method also allows you to use per-relation protocols. + +Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling +`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: +- `otlp_grpc` +- `otlp_http` +- `zipkin` +- `tempo` + +If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, +the library will raise an error. + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + +## Provider Library Usage + +The `TracingEndpointProvider` object may be used by charms to manage relations with their +trace sources. For this purposes a Tempo-like charm needs to do two things + +1. Instantiate the `TracingEndpointProvider` object by providing it a +reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm +uses to interact with its trace sources. This relation must conform to the `tracing` interface +and it is strongly recommended that this relation be named `tracing` which is its +default value. + +For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as +follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointProvider(self) + # ... + + + +""" # noqa: W505 +import enum +import json +import logging +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import pydantic +from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole +from ops.framework import EventSource, Object +from ops.model import ModelError, Relation +from pydantic import BaseModel, Field + +# The unique Charmhub library identifier, never change it +LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 3 + +PYDEPS = ["pydantic"] + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "tracing" +RELATION_INTERFACE_NAME = "tracing" + +# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 +ReceiverProtocol = Literal[ + "zipkin", + "otlp_grpc", + "otlp_http", + "jaeger_grpc", + "jaeger_thrift_http", +] + +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + +BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} + + +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { + "zipkin": TransportProtocolType.http, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, + "jaeger_thrift_http": TransportProtocolType.http, + "jaeger_grpc": TransportProtocolType.grpc, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. 
+""" + + +class TracingError(Exception): + """Base class for custom errors raised by this library.""" + + +class NotReadyError(TracingError): + """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" + + +class ProtocolNotRequestedError(TracingError): + """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" + + +class DataValidationError(TracingError): + """Raised when data validation fails on IPU relation data.""" + + +class AmbiguousRelationUsageError(TracingError): + """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" + + +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class DatabagModel(BaseModel): # type: ignore + """Base databag model.""" + + class Config: + """Pydantic config.""" + + # ignore any extra fields in the databag + extra = "ignore" + """Ignore any extra fields in the databag.""" + allow_population_by_field_name = True + """Allow instantiating this class by field name (instead of forcing alias).""" + + _NEST_UNDER = None + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + if cls._NEST_UNDER: + return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {f.alias for f in cls.__fields__.values()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.parse_raw(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + + if self._NEST_UNDER: + databag[self._NEST_UNDER] = self.json(by_alias=True) + return databag + + dct = self.dict() + for key, field in self.__fields__.items(): # type: ignore + value = dct[key] + databag[field.alias or key] = json.dumps(value) + + return databag + +else: + from pydantic import ConfigDict + + class DatabagModel(BaseModel): + """Base databag model.""" + + model_config = ConfigDict( + # ignore any extra fields in the databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, # type: ignore + ) + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) # type: ignore + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.__fields__.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. 
{databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( # type: ignore + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump() # type: ignore + for key, field in self.model_fields.items(): # type: ignore + value = dct[key] + if value == field.default: + continue + databag[field.alias or key] = json.dumps(value) + + return databag + + +# todo use models from charm-relation-interfaces +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" + + class Config: + """Pydantic config.""" + + use_enum_values = True + """Allow serializing enum values.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( # type: ignore + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + + +class Receiver(BaseModel): + """Specification of an active receiver.""" + + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. + If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" + + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) + + +class TracingRequirerAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing requirer.""" + + receivers: List[ReceiverProtocol] + """Requested receivers.""" + + +class _AutoSnapshotEvent(RelationEvent): + __args__: Tuple[str, ...] 
= () + __optional_kwargs__: Dict[str, Any] = {} + + @classmethod + def __attrs__(cls): + return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) + + def __init__(self, handle, relation, *args, **kwargs): + super().__init__(handle, relation) + + if not len(self.__args__) == len(args): + raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) + + for attr, obj in zip(self.__args__, args): + setattr(self, attr, obj) + for attr, default in self.__optional_kwargs__.items(): + obj = kwargs.get(attr, default) + setattr(self, attr, obj) + + def snapshot(self) -> dict: + dct = super().snapshot() + for attr in self.__attrs__(): + obj = getattr(self, attr) + try: + dct[attr] = obj + except ValueError as e: + raise ValueError( + "cannot automagically serialize {}: " + "override this method and do it " + "manually.".format(obj) + ) from e + + return dct + + def restore(self, snapshot: dict) -> None: + super().restore(snapshot) + for attr, obj in snapshot.items(): + setattr(self, attr, obj) + + +class RelationNotFoundError(Exception): + """Raised if no relation with the given name is found.""" + + def __init__(self, relation_name: str): + self.relation_name = relation_name + self.message = "No relation named '{}' found".format(relation_name) + super().__init__(self.message) + + +class RelationInterfaceMismatchError(Exception): + """Raised if the relation with the given name has an unexpected interface.""" + + def __init__( + self, + relation_name: str, + expected_relation_interface: str, + actual_relation_interface: str, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_interface + self.actual_relation_interface = actual_relation_interface + self.message = ( + "The '{}' relation has '{}' as interface rather than the expected '{}'".format( + relation_name, actual_relation_interface, expected_relation_interface + ) + ) + + super().__init__(self.message) + + +class RelationRoleMismatchError(Exception): + """Raised if the relation with the given name has a different role than expected.""" + + def __init__( + self, + relation_name: str, + expected_relation_role: RelationRole, + actual_relation_role: RelationRole, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_role + self.actual_relation_role = actual_relation_role + self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( + relation_name, repr(actual_relation_role), repr(expected_relation_role) + ) + + super().__init__(self.message) + + +def _validate_relation_by_interface_and_direction( + charm: CharmBase, + relation_name: str, + expected_relation_interface: str, + expected_relation_role: RelationRole, +): + """Validate a relation. + + Verifies that the `relation_name` provided: (1) exists in metadata.yaml, + (2) declares as interface the interface name passed as `relation_interface` + and (3) has the right "direction", i.e., it is a relation that `charm` + provides or requires. + + Args: + charm: a `CharmBase` object to scan for the matching relation. + relation_name: the name of the relation to be verified. + expected_relation_interface: the interface name to be matched by the + relation named `relation_name`. + expected_relation_role: whether the `relation_name` must be either + provided or required by `charm`. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. 
+ RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the same relation interface + as specified via the `expected_relation_interface` argument. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the same role as specified + via the `expected_relation_role` argument. + """ + if relation_name not in charm.meta.relations: + raise RelationNotFoundError(relation_name) + + relation = charm.meta.relations[relation_name] + + # fixme: why do we need to cast here? + actual_relation_interface = cast(str, relation.interface_name) + + if actual_relation_interface != expected_relation_interface: + raise RelationInterfaceMismatchError( + relation_name, expected_relation_interface, actual_relation_interface + ) + + if expected_relation_role is RelationRole.provides: + if relation_name not in charm.meta.provides: + raise RelationRoleMismatchError( + relation_name, RelationRole.provides, RelationRole.requires + ) + elif expected_relation_role is RelationRole.requires: + if relation_name not in charm.meta.requires: + raise RelationRoleMismatchError( + relation_name, RelationRole.requires, RelationRole.provides + ) + else: + raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) + + +class RequestEvent(RelationEvent): + """Event emitted when a remote requests a tracing endpoint.""" + + @property + def requested_receivers(self) -> List[ReceiverProtocol]: + """List of receiver protocols that have been requested.""" + relation = self.relation + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + return TracingRequirerAppData.load(relation.data[app]).receivers + + +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + +class TracingEndpointProviderEvents(CharmEvents): + """TracingEndpointProvider events.""" + + request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) + + +class TracingEndpointProvider(Object): + """Class representing a trace receiver service.""" + + on = TracingEndpointProviderEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + external_url: Optional[str] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ): + """Initialize. + + Args: + charm: a `CharmBase` instance that manages this instance of the Tempo service. + external_url: external address of the node hosting the tempo server, + if an ingress is present. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.requires` + role. 
+ """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides + ) + + super().__init__(charm, relation_name + "tracing-provider") + self._charm = charm + self._external_url = external_url + self._relation_name = relation_name + self.framework.observe( + self._charm.on[relation_name].relation_joined, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_created, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_changed, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) + + def _on_relation_event(self, e: RelationEvent): + """Handle relation created/joined/changed events.""" + if self.is_requirer_ready(e.relation): + self.on.request.emit(e.relation) + + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" + try: + self._get_requested_protocols(relation) + except NotReadyError: + return False + return True + + @staticmethod + def _get_requested_protocols(relation: Relation): + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + try: + databag = TracingRequirerAppData.load(relation.data[app]) + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"relation {relation} is not ready to talk tracing") + raise NotReadyError() + return databag.receivers + + def requested_protocols(self): + """All receiver protocols that have been requested by our related apps.""" + requested_protocols = set() + for relation in self.relations: + try: + protocols = self._get_requested_protocols(relation) + except NotReadyError: + continue + requested_protocols.update(protocols) + return requested_protocols + + @property + def relations(self) -> List[Relation]: + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] + + def publish_receivers(self, receivers: Sequence[RawReceiver]): + """Let all requirers know that these receivers are active and listening.""" + if not self._charm.unit.is_leader(): + raise RuntimeError("only leader can do this") + + for relation in self.relations: + try: + TracingProviderAppData( + receivers=[ + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers + ], + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to update_relation_data." + f"The relation must be gone." 
+ ) + continue + raise + + +class EndpointRemovedEvent(RelationBrokenEvent): + """Event representing a change in one of the receiver endpoints.""" + + +class EndpointChangedEvent(_AutoSnapshotEvent): + """Event representing a change in one of the receiver endpoints.""" + + __args__ = ("_receivers",) + + if TYPE_CHECKING: + _receivers = [] # type: List[dict] + + @property + def receivers(self) -> List[Receiver]: + """Cast receivers back from dict.""" + return [Receiver(**i) for i in self._receivers] + + +class TracingEndpointRequirerEvents(CharmEvents): + """TracingEndpointRequirer events.""" + + endpoint_changed = EventSource(EndpointChangedEvent) + endpoint_removed = EventSource(EndpointRemovedEvent) + + +class TracingEndpointRequirer(Object): + """A tracing endpoint for Tempo.""" + + on = TracingEndpointRequirerEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + protocols: Optional[List[ReceiverProtocol]] = None, + ): + """Construct a tracing requirer for a Tempo charm. + + If your application supports pushing traces to a distributed tracing backend, the + `TracingEndpointRequirer` object enables your charm to easily access endpoint information + exchanged over a `tracing` relation interface. + + Args: + charm: a `CharmBase` object that manages this + `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating + class. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". It is strongly + advised not to change the default, so that people deploying your charm will have a + consistent experience with all other charms that provide tracing endpoints. + protocols: optional list of protocols that the charm intends to send traces with. + The provider will enable receivers for these and only these protocols, + so be sure to enable all protocols the charm or its workload are going to need. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.provides` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires + ) + + super().__init__(charm, relation_name) + + self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[self._relation_name] + self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) + self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) + + if protocols: + self.request_protocols(protocols) + + def request_protocols( + self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None + ): + """Publish the list of protocols which the provider should activate.""" + # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? + relations = [relation] if relation else self.relations + + if not protocols: + # empty sequence + raise ValueError( + "You need to pass a nonempty sequence of protocols to `request_protocols`." 
+ ) + + try: + if self._charm.unit.is_leader(): + for relation in relations: + TracingRequirerAppData( + receivers=list(protocols), + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to request_protocols." + f"The relation must be gone." + ) + return + raise + + @property + def relations(self) -> List[Relation]: + """The tracing relations associated with this endpoint.""" + return self._charm.model.relations[self._relation_name] + + @property + def _relation(self) -> Optional[Relation]: + """If this wraps a single endpoint, the relation bound to it, if any.""" + if not self._is_single_endpoint: + objname = type(self).__name__ + raise AmbiguousRelationUsageError( + f"This {objname} wraps a {self._relation_name} endpoint that has " + "limit != 1. We can't determine what relation, of the possibly many, you are " + f"talking about. Please pass a relation instance while calling {objname}, " + "or set limit=1 in the charm metadata." + ) + relations = self.relations + return relations[0] if relations else None + + def is_ready(self, relation: Optional[Relation] = None): + """Is this endpoint ready?""" + relation = relation or self._relation + if not relation: + logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") + return False + if relation.data is None: + logger.error(f"relation data is None for {relation}") + return False + if not relation.app: + logger.error(f"{relation} event received but there is no relation.app") + return False + try: + databag = dict(relation.data[relation.app]) + TracingProviderAppData.load(databag) + + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"failed validating relation data for {relation}") + return False + return True + + def _on_tracing_relation_changed(self, event): + """Notify the providers that there is new endpoint information available.""" + relation = event.relation + if not self.is_ready(relation): + self.on.endpoint_removed.emit(relation) # type: ignore + return + + data = TracingProviderAppData.load(relation.data[relation.app]) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore + + def _on_tracing_relation_broken(self, event: RelationBrokenEvent): + """Notify the providers that the endpoint is broken.""" + relation = event.relation + self.on.endpoint_removed.emit(relation) # type: ignore + + def get_all_endpoints( + self, relation: Optional[Relation] = None + ) -> Optional[TracingProviderAppData]: + """Unmarshalled relation data.""" + relation = relation or self._relation + if not self.is_ready(relation): + return + return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore + + def _get_endpoint( + self, relation: Optional[Relation], protocol: ReceiverProtocol + ) -> Optional[str]: + app_data = self.get_all_endpoints(relation) + if not app_data: + return None + receivers: List[Receiver] = list( + filter(lambda i: i.protocol.name == protocol, app_data.receivers) + ) + if not receivers: + logger.error(f"no receiver found with protocol={protocol!r}") + return + if len(receivers) > 1: + logger.error( + f"too many receivers with protocol={protocol!r}; using first one. 
Found: {receivers}" + ) + return + + receiver = receivers[0] + return receiver.url + + def get_endpoint( + self, protocol: ReceiverProtocol, relation: Optional[Relation] = None + ) -> Optional[str]: + """Receiver endpoint for the given protocol. + + It could happen that this function gets called before the provider publishes the endpoints. + In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to + restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. + + Raises: + ProtocolNotRequestedError: + If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. + """ + endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) + if not endpoint: + requested_protocols = set() + relations = [relation] if relation else self.relations + for relation in relations: + try: + databag = TracingRequirerAppData.load(relation.data[self._charm.app]) + except DataValidationError: + continue + + requested_protocols.update(databag.receivers) + + if protocol not in requested_protocols: + raise ProtocolNotRequestedError(protocol, relation) + + return None + return endpoint + + +def charm_tracing_config( + endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] +) -> Tuple[Optional[str], Optional[str]]: + """Return the charm_tracing config you likely want. + + If no endpoint is provided: + disable charm tracing. + If https endpoint is provided but cert_path is not found on disk: + disable charm tracing. + If https endpoint is provided and cert_path is None: + ERROR + Else: + proceed with charm tracing (with or without tls, as appropriate) + + Usage: + If you are using charm_tracing >= v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self.my_endpoint, self.cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + + If you are using charm_tracing < v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self._my_endpoint, self._cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + >>> @property + >>> def my_endpoint(self): + >>> return self._my_endpoint + >>> @property + >>> def cert_path(self): + >>> return self._cert_path + + """ + if not endpoint_requirer.is_ready(): + return None, None + + endpoint = endpoint_requirer.get_endpoint("otlp_http") + if not endpoint: + return None, None + + is_https = endpoint.startswith("https://") + + if is_https: + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. 
" + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " + ) + return None, None + return endpoint, str(cert_path) + else: + return endpoint, None diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py index cf8def1..e12ca2a 100644 --- a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py @@ -227,9 +227,7 @@ def my_tracing_endpoint(self) -> Optional[str]: """ import typing -from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import ( - encode_spans, -) +from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import encode_spans from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter @@ -294,29 +292,14 @@ def _remove_stale_otel_sdk_packages(): from contextlib import contextmanager from contextvars import Context, ContextVar, copy_context from pathlib import Path -from typing import ( - Any, - Callable, - Generator, - List, - Optional, - Sequence, - Type, - TypeVar, - Union, - cast, -) +from typing import Any, Callable, Generator, List, Optional, Sequence, Type, TypeVar, Union, cast import opentelemetry import ops from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider -from opentelemetry.sdk.trace.export import ( - BatchSpanProcessor, - SpanExporter, - SpanExportResult, -) +from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter, SpanExportResult from opentelemetry.trace import INVALID_SPAN, Tracer from opentelemetry.trace import get_current_span as otlp_get_current_span from opentelemetry.trace import ( diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 2035dff..4516af6 100644 --- a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -91,13 +91,7 @@ def __init__(self, *args): ) import pydantic -from ops.charm import ( - CharmBase, - CharmEvents, - RelationBrokenEvent, - RelationEvent, - RelationRole, -) +from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole from ops.framework import EventSource, Object from ops.model import ModelError, Relation from pydantic import BaseModel, Field diff --git a/examples/flask/src/charm.py b/examples/flask/src/charm.py index 1b5c207..1274e26 100755 --- a/examples/flask/src/charm.py +++ b/examples/flask/src/charm.py @@ -13,6 +13,7 @@ logger = logging.getLogger(__name__) + class FlaskCharm(paas_charm.flask.Charm): """Flask Charm service.""" diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index ef0e4d7..c658026 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -20,8 +20,6 @@ import redis from celery import Celery, Task from flask import Flask, g, jsonify, request -from opentelemetry import trace -from opentelemetry.instrumentation.flask import FlaskInstrumentor def hostname(): @@ -56,9 +54,11 @@ def __call__(self, *args: object, **kwargs: object) -> object: app = Flask(__name__) 
app.config.from_prefixed_env() + +from opentelemetry.instrumentation.flask import FlaskInstrumentor + FlaskInstrumentor().instrument_app(app) -tracer = trace.get_tracer(__name__) broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") # Configure Celery only if Redis is configured celery_app = celery_init_app(app, broker_url) diff --git a/examples/flask/test_rock/requirements.txt b/examples/flask/test_rock/requirements.txt index 7f29329..dde3416 100644 --- a/examples/flask/test_rock/requirements.txt +++ b/examples/flask/test_rock/requirements.txt @@ -9,7 +9,6 @@ boto3 pika celery googleapis-common-protos -grpcio opentelemetry-api opentelemetry-exporter-otlp opentelemetry-exporter-otlp-proto-http diff --git a/src/paas_charm/__init__.py b/src/paas_charm/__init__.py index 2457d3c..8fc5d13 100644 --- a/src/paas_charm/__init__.py +++ b/src/paas_charm/__init__.py @@ -58,3 +58,10 @@ raise exceptions.MissingCharmLibraryError( "Missing charm library, please run `charmcraft fetch-lib charms.redis_k8s.v0.redis`" ) from import_error +try: + import charms.tempo_coordinator_k8s.v0.tracing # noqa: F401 +except ImportError as import_error: + raise exceptions.MissingCharmLibraryError( + "Missing charm library, please run " + "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" + ) from import_error diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index faa3931..7d74b4f 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -5,14 +5,11 @@ import logging -from ops.pebble import ExecError, ExecProcess - from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig from paas_charm._gunicorn.workload_config import create_workload_config from paas_charm._gunicorn.wsgi_app import WsgiApp from paas_charm.app import App, WorkloadConfig from paas_charm.charm import PaasCharm -from paas_charm.exceptions import CharmConfigInvalidError logger = logging.getLogger(__name__) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 72429e4..87853e9 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -130,12 +130,16 @@ def _config(self) -> str: error_log = repr( APPLICATION_ERROR_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) - framework_environments = self._container.get_plan().to_dict()['services'][self._workload_config.framework]['environment'] + framework_environments = self._container.get_plan().to_dict()["services"][ + self._workload_config.framework + ]["environment"] tracing_uri = None tracing_service_name = None - if framework_environments.get('TRACING_URI', None): - tracing_uri = framework_environments['TRACING_URI'] - tracing_service_name = framework_environments['TRACING_SERVICE_NAME'] + if framework_environments.get("OTEL_EXPORTER_OTLP_ENDPOINT", None): + tracing_uri = framework_environments["OTEL_EXPORTER_OTLP_ENDPOINT"] + tracing_service_name = framework_environments["OTEL_SERVICE_NAME"] + # check if opentelemetry stuff are installed but not here. 
+            # if they are installed then use them if not go into blocked state
         config = textwrap.dedent(
             f"""\
                 from opentelemetry import trace
diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py
index 79990b9..c892148 100644
--- a/src/paas_charm/app.py
+++ b/src/paas_charm/app.py
@@ -260,7 +260,14 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") -
         interface_envvars = _db_url_to_env_variables(interface_name.upper(), uri)
         env.update(interface_envvars)
     if integrations.tracing_relation_data:
-        env.update(integrations.tracing_relation_data)
+        env.update(
+            (k, v)
+            for k, v in (
+                ("OTEL_SERVICE_NAME", integrations.tracing_relation_data.service_name),
+                ("OTEL_EXPORTER_OTLP_ENDPOINT", integrations.tracing_relation_data.endpoint),
+            )
+            if v is not None
+        )
     if integrations.s3_parameters:
         s3 = integrations.s3_parameters
diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py
index 8f96db8..2fd7287 100644
--- a/src/paas_charm/charm.py
+++ b/src/paas_charm/charm.py
@@ -9,13 +9,13 @@
 import ops
 from charms.data_platform_libs.v0.data_interfaces import DatabaseRequiresEvent
 from charms.redis_k8s.v0.redis import RedisRelationCharmEvents, RedisRequires
-
+from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer
 from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer
 from ops.model import Container
 from pydantic import BaseModel, ValidationError
 
 from paas_charm.app import App, WorkloadConfig
-from paas_charm.charm_state import CharmState
+from paas_charm.charm_state import CharmState, TempoParameters
 from paas_charm.charm_utils import block_if_invalid_config
 from paas_charm.database_migration import DatabaseMigration, DatabaseMigrationStatus
 from paas_charm.databases import make_database_requirers
@@ -45,13 +45,6 @@
     "Missing charm library, please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`"
 )
 
-try:
-    # pylint: disable=ungrouped-imports
-    from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer
-except ImportError:
-    logger.exception(
-        "Missing charm library, please run `charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`"
-    )
 
 class PaasCharm(abc.ABC, ops.CharmBase):  # pylint: disable=too-many-instance-attributes
     """PaasCharm base charm service mixin.
@@ -78,6 +71,8 @@ def _create_app(self) -> App:
 
     on = RedisRelationCharmEvents()
 
+    # pylint: disable=too-many-statements
+    # disabled because we have too many possible integrations for the workload.
     def __init__(self, framework: ops.Framework, framework_name: str) -> None:
         """Initialize the instance.
@@ -90,7 +85,7 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None:
         self._secret_storage = KeySecretStorage(charm=self, key=f"{framework_name}_secret_key")
         self._database_requirers = make_database_requirers(self, self.app.name)
 
-        requires = self.framework.meta.requires ######*************
+        requires = self.framework.meta.requires
         if "redis" in requires and requires["redis"].interface_name == "redis":
             self._redis = RedisRequires(charm=self, relation_name="redis")
             self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated)
@@ -125,9 +120,16 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None:
             self._rabbitmq = None
 
         if "tracing" in requires and requires["tracing"].interface_name == "tracing":
-            self._tracing = TracingEndpointRequirer(self, relation_name="tracing", protocols=["otlp_http"])
-            self.framework.observe(self._tracing.on.endpoint_changed, self._on_tracing_relation_changed)
-            self.framework.observe(self._tracing.on.endpoint_removed, self._on_tracing_relation_broken)
+            try:
+                self._tracing = TracingEndpointRequirer(
+                    self, relation_name="tracing", protocols=["otlp_http"]
+                )
+                # add self.framework.observe for relation changed and departed
+            except NameError:
+                self.update_app_and_unit_status(ops.BlockedStatus("Can not initialize tracing."))
+                self._tracing = None
+        else:
+            self._tracing = None
 
         self._database_migration = DatabaseMigration(
             container=self.unit.get_container(self._workload_config.container_name),
@@ -278,6 +280,8 @@ def is_ready(self) -> bool:
         return True
 
+    # pylint: disable=too-many-branches
+    # disabled because we have too many possible integrations for the workload.
     # Pending to refactor all integrations
     def _missing_required_integrations(self, charm_state: CharmState) -> list[str]:  # noqa: C901
         """Get list of missing integrations that are required.
@@ -368,8 +372,11 @@ def _create_charm_state(self) -> CharmState:
         )
         tracing_relation_data = None
         if self._tracing and self._tracing.is_ready():
-            tracing_relation_data = {"TRACING_URI": self._tracing.get_endpoint(protocol="otlp_http"),
-                                     "TRACING_SERVICE_NAME": f"{self.framework.meta.name}-charm"}
+            tracing_relation_data = TempoParameters(
+                endpoint=self._tracing.get_endpoint(protocol="otlp_http"),
+                service_name=f"{self.framework.meta.name}-charm",
+            )
+
         return CharmState.from_charm(
             config=config,
             framework=self._framework_name,
@@ -495,13 +502,3 @@ def _on_rabbitmq_ready(self, _: ops.HookEvent) -> None:
     def _on_rabbitmq_departed(self, _: ops.HookEvent) -> None:
         """Handle rabbitmq departed event."""
         self.restart()
-
-    @block_if_invalid_config
-    def _on_tracing_relation_changed(self, _: ops.HookEvent) -> None:
-        """Handle tracing relation changed event."""
-        self.restart()
-
-    @block_if_invalid_config
-    def _on_tracing_relation_broken(self, _: ops.HookEvent) -> None:
-        """Handle tracing relation broken event."""
-        self.restart()
diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py
index 607fcf8..e5f7318 100644
--- a/src/paas_charm/charm_state.py
+++ b/src/paas_charm/charm_state.py
@@ -89,7 +89,7 @@ def from_charm(  # pylint: disable=too-many-arguments
         s3_connection_info: dict[str, str] | None = None,
         saml_relation_data: typing.MutableMapping[str, str] | None = None,
         rabbitmq_uri: str | None = None,
-        tracing_relation_data: typing.MutableMapping[str, str] | None = None,
+        tracing_relation_data: "TempoParameters | None" = None,
         base_url: str | None = None,
     ) -> "CharmState":
         """Initialize a new instance of the CharmState class from the associated charm.
@@ -222,7 +222,7 @@ class IntegrationsState:
     s3_parameters: "S3Parameters | None" = None
     saml_parameters: "SamlParameters | None" = None
     rabbitmq_uri: str | None = None
-    tracing_relation_data: dict[str, str] = field(default_factory=dict)
+    tracing_relation_data: "TempoParameters | None" = None
 
     # This dataclass combines all the integrations, so it is reasonable that they stay together.
     @classmethod
@@ -234,7 +234,7 @@ def build(  # pylint: disable=too-many-arguments
         s3_connection_info: dict[str, str] | None,
         saml_relation_data: typing.MutableMapping[str, str] | None = None,
         rabbitmq_uri: str | None = None,
-        tracing_relation_data: typing.MutableMapping[str, str] | None = None,
+        tracing_relation_data: "TempoParameters | None" = None,
     ) -> "IntegrationsState":
         """Initialize a new instance of the IntegrationsState class.
 
@@ -299,6 +299,18 @@ def build(  # pylint: disable=too-many-arguments
         )
 
 
+class TempoParameters(BaseModel):
+    """Parameters for connecting to the Tempo tracing endpoint.
+
+    Attributes:
+        endpoint: Tempo endpoint URL to send the traces.
+        service_name: Tempo service name for the workload.
+    """
+
+    endpoint: str | None = None
+    service_name: str | None = None
+
+
 class S3Parameters(BaseModel):
     """Configuration for accessing S3 bucket.
From f9b6b3ceb84725e397dd64636250ab7f795571c1 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 18 Dec 2024 10:44:47 +0300 Subject: [PATCH 09/70] chore(): Cleaning --- examples/django/charm/charmcraft.yaml | 10 +- examples/fastapi/charm/charmcraft.yaml | 10 +- .../tempo_coordinator_k8s/v0/tracing.py | 998 --------------- examples/flask/charmcraft.yaml | 10 +- .../tempo_coordinator_k8s/v0/charm_tracing.py | 1072 ----------------- .../tempo_coordinator_k8s/v0/tracing.py | 998 --------------- examples/flask/requirements.txt | 5 +- examples/flask/test_rock/app.py | 4 - examples/flask/test_rock/requirements.txt | 9 - examples/flask/test_rock/rockcraft.yaml | 7 - src/paas_charm/_gunicorn/charm.py | 1 - 11 files changed, 11 insertions(+), 3113 deletions(-) delete mode 100644 examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py delete mode 100644 examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py delete mode 100644 examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py diff --git a/examples/django/charm/charmcraft.yaml b/examples/django/charm/charmcraft.yaml index cf7f0e2..05bf5df 100644 --- a/examples/django/charm/charmcraft.yaml +++ b/examples/django/charm/charmcraft.yaml @@ -50,9 +50,9 @@ config: type: string django-secret-key-id: description: >- - This configuration is similar to `django-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual Django secret key. - To create the secret, run the following command: + This configuration is similar to `django-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual Django secret key. + To create the secret, run the following command: `juju add-secret my-django-secret-key value= && juju grant-secret my-django-secret-key django-k8s`, and use the outputted secret ID to configure this option. type: secret @@ -90,10 +90,6 @@ requires: interface: postgresql_client optional: False limit: 1 - tracing: - interface: tracing - optional: True - limit: 1 resources: django-app-image: description: django application image. diff --git a/examples/fastapi/charm/charmcraft.yaml b/examples/fastapi/charm/charmcraft.yaml index fbfc34d..fc19128 100644 --- a/examples/fastapi/charm/charmcraft.yaml +++ b/examples/fastapi/charm/charmcraft.yaml @@ -50,9 +50,9 @@ config: app-secret-key-id: type: secret description: >- - This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual secret key. - To create the secret, run the following command: + This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual secret key. + To create the secret, run the following command: `juju add-secret my-secret-key value= && juju grant-secret my-secret-key fastapi-k8s`, and use the outputted secret ID to configure this option. user-defined-config: @@ -79,10 +79,6 @@ requires: interface: postgresql_client optional: True limit: 1 - tracing: - interface: tracing - optional: True - limit: 1 resources: app-image: description: FastAPI application image. 
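With this cleanup patch the per-framework copies of the Tempo charm libraries are deleted and the example charmcraft.yaml tracing endpoints reverted, so the only tracing wiring that remains is the generic one in src/paas_charm/charm.py from the previous patch. A condensed, illustrative sketch of that pattern follows; the `_tracing_endpoint` helper is a stand-in for the real `_create_charm_state`/`TempoParameters` plumbing shown above, not verbatim source:

    import ops
    from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer

    class PaasCharm(ops.CharmBase):
        def __init__(self, framework: ops.Framework, framework_name: str) -> None:
            super().__init__(framework)
            requires = self.framework.meta.requires
            self._tracing = None
            if "tracing" in requires and requires["tracing"].interface_name == "tracing":
                # Ask the provider to enable an OTLP/HTTP receiver over the "tracing" relation.
                self._tracing = TracingEndpointRequirer(
                    self, relation_name="tracing", protocols=["otlp_http"]
                )

        def _tracing_endpoint(self) -> str | None:
            # Read the endpoint only once the provider has published its receivers.
            if self._tracing and self._tracing.is_ready():
                return self._tracing.get_endpoint(protocol="otlp_http")
            return None
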
diff --git a/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py deleted file mode 100644 index 4516af6..0000000 --- a/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ /dev/null @@ -1,998 +0,0 @@ -# Copyright 2024 Canonical Ltd. -# See LICENSE file for licensing details. -"""## Overview. - -This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a -tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm -may maintain the same interface and be backward compatible with all currently integrated charms. - -## Requirer Library Usage - -Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` -object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` -object only requires instantiating it, typically in the constructor of your charm. The -`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint - is exposed by the Tempo charm, and a list of protocols it intends to send traces with. - This relation must use the `tracing` interface. - The `TracingEndpointRequirer` object may be instantiated as follows - - from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer - - def __init__(self, *args): - super().__init__(*args) - # ... - self.tracing = TracingEndpointRequirer(self, - protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] - ) - # ... - -Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the -parent charm. - -Alternatively to providing the list of requested protocols at init time, the charm can do it at -any point in time by calling the -`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. -Using this method also allows you to use per-relation protocols. - -Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling -`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: -- `otlp_grpc` -- `otlp_http` -- `zipkin` -- `tempo` - -If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, -the library will raise an error. - -We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests -go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. - -## Provider Library Usage - -The `TracingEndpointProvider` object may be used by charms to manage relations with their -trace sources. For this purposes a Tempo-like charm needs to do two things - -1. Instantiate the `TracingEndpointProvider` object by providing it a -reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm -uses to interact with its trace sources. This relation must conform to the `tracing` interface -and it is strongly recommended that this relation be named `tracing` which is its -default value. - -For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as -follows - - from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider - - def __init__(self, *args): - super().__init__(*args) - # ... - self.tracing = TracingEndpointProvider(self) - # ... 
- - - -""" # noqa: W505 -import enum -import json -import logging -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, - List, - Literal, - MutableMapping, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -import pydantic -from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole -from ops.framework import EventSource, Object -from ops.model import ModelError, Relation -from pydantic import BaseModel, Field - -# The unique Charmhub library identifier, never change it -LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" - -# Increment this major API version when introducing breaking changes -LIBAPI = 0 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 3 - -PYDEPS = ["pydantic"] - -logger = logging.getLogger(__name__) - -DEFAULT_RELATION_NAME = "tracing" -RELATION_INTERFACE_NAME = "tracing" - -# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 -ReceiverProtocol = Literal[ - "zipkin", - "otlp_grpc", - "otlp_http", - "jaeger_grpc", - "jaeger_thrift_http", -] - -RawReceiver = Tuple[ReceiverProtocol, str] -"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), -(secured, if available) resolvable server url. -""" - -BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} - - -class TransportProtocolType(str, enum.Enum): - """Receiver Type.""" - - http = "http" - grpc = "grpc" - - -receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { - "zipkin": TransportProtocolType.http, - "otlp_grpc": TransportProtocolType.grpc, - "otlp_http": TransportProtocolType.http, - "jaeger_thrift_http": TransportProtocolType.http, - "jaeger_grpc": TransportProtocolType.grpc, -} -"""A mapping between telemetry protocols and their corresponding transport protocol. -""" - - -class TracingError(Exception): - """Base class for custom errors raised by this library.""" - - -class NotReadyError(TracingError): - """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" - - -class ProtocolNotRequestedError(TracingError): - """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" - - -class DataValidationError(TracingError): - """Raised when data validation fails on IPU relation data.""" - - -class AmbiguousRelationUsageError(TracingError): - """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" - - -if int(pydantic.version.VERSION.split(".")[0]) < 2: - - class DatabagModel(BaseModel): # type: ignore - """Base databag model.""" - - class Config: - """Pydantic config.""" - - # ignore any extra fields in the databag - extra = "ignore" - """Ignore any extra fields in the databag.""" - allow_population_by_field_name = True - """Allow instantiating this class by field name (instead of forcing alias).""" - - _NEST_UNDER = None - - @classmethod - def load(cls, databag: MutableMapping): - """Load this model from a Juju databag.""" - if cls._NEST_UNDER: - return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {f.alias for f in cls.__fields__.values()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. 
{databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.parse_raw(json.dumps(data)) # type: ignore - except pydantic.ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): - """Write the contents of this model to Juju databag. - - :param databag: the databag to write the data to. - :param clear: ensure the databag is cleared before writing it. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - - if self._NEST_UNDER: - databag[self._NEST_UNDER] = self.json(by_alias=True) - return databag - - dct = self.dict() - for key, field in self.__fields__.items(): # type: ignore - value = dct[key] - databag[field.alias or key] = json.dumps(value) - - return databag - -else: - from pydantic import ConfigDict - - class DatabagModel(BaseModel): - """Base databag model.""" - - model_config = ConfigDict( - # ignore any extra fields in the databag - extra="ignore", - # Allow instantiating this class by field name (instead of forcing alias). - populate_by_name=True, - # Custom config key: whether to nest the whole datastructure (as json) - # under a field or spread it out at the toplevel. - _NEST_UNDER=None, # type: ignore - ) - """Pydantic config.""" - - @classmethod - def load(cls, databag: MutableMapping): - """Load this model from a Juju databag.""" - nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore - if nest_under: - return cls.model_validate(json.loads(databag[nest_under])) # type: ignore - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.__fields__.items()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. {databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.model_validate_json(json.dumps(data)) # type: ignore - except pydantic.ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): - """Write the contents of this model to Juju databag. - - :param databag: the databag to write the data to. - :param clear: ensure the databag is cleared before writing it. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - nest_under = self.model_config.get("_NEST_UNDER") - if nest_under: - databag[nest_under] = self.model_dump_json( # type: ignore - by_alias=True, - # skip keys whose values are default - exclude_defaults=True, - ) - return databag - - dct = self.model_dump() # type: ignore - for key, field in self.model_fields.items(): # type: ignore - value = dct[key] - if value == field.default: - continue - databag[field.alias or key] = json.dumps(value) - - return databag - - -# todo use models from charm-relation-interfaces -if int(pydantic.version.VERSION.split(".")[0]) < 2: - - class ProtocolType(BaseModel): # type: ignore - """Protocol Type.""" - - class Config: - """Pydantic config.""" - - use_enum_values = True - """Allow serializing enum values.""" - - name: str = Field( - ..., - description="Receiver protocol name. 
What protocols are supported (and what they are called) " - "may differ per provider.", - examples=["otlp_grpc", "otlp_http", "tempo_http"], - ) - - type: TransportProtocolType = Field( - ..., - description="The transport protocol used by this receiver.", - examples=["http", "grpc"], - ) - -else: - - class ProtocolType(BaseModel): - """Protocol Type.""" - - model_config = ConfigDict( # type: ignore - # Allow serializing enum values. - use_enum_values=True - ) - """Pydantic config.""" - - name: str = Field( - ..., - description="Receiver protocol name. What protocols are supported (and what they are called) " - "may differ per provider.", - examples=["otlp_grpc", "otlp_http", "tempo_http"], - ) - - type: TransportProtocolType = Field( - ..., - description="The transport protocol used by this receiver.", - examples=["http", "grpc"], - ) - - -class Receiver(BaseModel): - """Specification of an active receiver.""" - - protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") - url: str = Field( - ..., - description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. - Otherwise, it would be the service's fqdn or internal IP. - If the protocol type is grpc, the url will not contain a scheme.""", - examples=[ - "http://traefik_address:2331", - "https://traefik_address:2331", - "http://tempo_public_ip:2331", - "https://tempo_public_ip:2331", - "tempo_public_ip:2331", - ], - ) - - -class TracingProviderAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing provider.""" - - receivers: List[Receiver] = Field( - ..., - description="List of all receivers enabled on the tracing provider.", - ) - - -class TracingRequirerAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing requirer.""" - - receivers: List[ReceiverProtocol] - """Requested receivers.""" - - -class _AutoSnapshotEvent(RelationEvent): - __args__: Tuple[str, ...] 
= () - __optional_kwargs__: Dict[str, Any] = {} - - @classmethod - def __attrs__(cls): - return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) - - def __init__(self, handle, relation, *args, **kwargs): - super().__init__(handle, relation) - - if not len(self.__args__) == len(args): - raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) - - for attr, obj in zip(self.__args__, args): - setattr(self, attr, obj) - for attr, default in self.__optional_kwargs__.items(): - obj = kwargs.get(attr, default) - setattr(self, attr, obj) - - def snapshot(self) -> dict: - dct = super().snapshot() - for attr in self.__attrs__(): - obj = getattr(self, attr) - try: - dct[attr] = obj - except ValueError as e: - raise ValueError( - "cannot automagically serialize {}: " - "override this method and do it " - "manually.".format(obj) - ) from e - - return dct - - def restore(self, snapshot: dict) -> None: - super().restore(snapshot) - for attr, obj in snapshot.items(): - setattr(self, attr, obj) - - -class RelationNotFoundError(Exception): - """Raised if no relation with the given name is found.""" - - def __init__(self, relation_name: str): - self.relation_name = relation_name - self.message = "No relation named '{}' found".format(relation_name) - super().__init__(self.message) - - -class RelationInterfaceMismatchError(Exception): - """Raised if the relation with the given name has an unexpected interface.""" - - def __init__( - self, - relation_name: str, - expected_relation_interface: str, - actual_relation_interface: str, - ): - self.relation_name = relation_name - self.expected_relation_interface = expected_relation_interface - self.actual_relation_interface = actual_relation_interface - self.message = ( - "The '{}' relation has '{}' as interface rather than the expected '{}'".format( - relation_name, actual_relation_interface, expected_relation_interface - ) - ) - - super().__init__(self.message) - - -class RelationRoleMismatchError(Exception): - """Raised if the relation with the given name has a different role than expected.""" - - def __init__( - self, - relation_name: str, - expected_relation_role: RelationRole, - actual_relation_role: RelationRole, - ): - self.relation_name = relation_name - self.expected_relation_interface = expected_relation_role - self.actual_relation_role = actual_relation_role - self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( - relation_name, repr(actual_relation_role), repr(expected_relation_role) - ) - - super().__init__(self.message) - - -def _validate_relation_by_interface_and_direction( - charm: CharmBase, - relation_name: str, - expected_relation_interface: str, - expected_relation_role: RelationRole, -): - """Validate a relation. - - Verifies that the `relation_name` provided: (1) exists in metadata.yaml, - (2) declares as interface the interface name passed as `relation_interface` - and (3) has the right "direction", i.e., it is a relation that `charm` - provides or requires. - - Args: - charm: a `CharmBase` object to scan for the matching relation. - relation_name: the name of the relation to be verified. - expected_relation_interface: the interface name to be matched by the - relation named `relation_name`. - expected_relation_role: whether the `relation_name` must be either - provided or required by `charm`. - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. 
- RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the same relation interface - as specified via the `expected_relation_interface` argument. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the same role as specified - via the `expected_relation_role` argument. - """ - if relation_name not in charm.meta.relations: - raise RelationNotFoundError(relation_name) - - relation = charm.meta.relations[relation_name] - - # fixme: why do we need to cast here? - actual_relation_interface = cast(str, relation.interface_name) - - if actual_relation_interface != expected_relation_interface: - raise RelationInterfaceMismatchError( - relation_name, expected_relation_interface, actual_relation_interface - ) - - if expected_relation_role is RelationRole.provides: - if relation_name not in charm.meta.provides: - raise RelationRoleMismatchError( - relation_name, RelationRole.provides, RelationRole.requires - ) - elif expected_relation_role is RelationRole.requires: - if relation_name not in charm.meta.requires: - raise RelationRoleMismatchError( - relation_name, RelationRole.requires, RelationRole.provides - ) - else: - raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) - - -class RequestEvent(RelationEvent): - """Event emitted when a remote requests a tracing endpoint.""" - - @property - def requested_receivers(self) -> List[ReceiverProtocol]: - """List of receiver protocols that have been requested.""" - relation = self.relation - app = relation.app - if not app: - raise NotReadyError("relation.app is None") - - return TracingRequirerAppData.load(relation.data[app]).receivers - - -class BrokenEvent(RelationBrokenEvent): - """Event emitted when a relation on tracing is broken.""" - - -class TracingEndpointProviderEvents(CharmEvents): - """TracingEndpointProvider events.""" - - request = EventSource(RequestEvent) - broken = EventSource(BrokenEvent) - - -class TracingEndpointProvider(Object): - """Class representing a trace receiver service.""" - - on = TracingEndpointProviderEvents() # type: ignore - - def __init__( - self, - charm: CharmBase, - external_url: Optional[str] = None, - relation_name: str = DEFAULT_RELATION_NAME, - ): - """Initialize. - - Args: - charm: a `CharmBase` instance that manages this instance of the Tempo service. - external_url: external address of the node hosting the tempo server, - if an ingress is present. - relation_name: an optional string name of the relation between `charm` - and the Tempo charmed service. The default is "tracing". - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. - RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the `tracing` relation - interface. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the `RelationRole.requires` - role. 
- """ - _validate_relation_by_interface_and_direction( - charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides - ) - - super().__init__(charm, relation_name + "tracing-provider") - self._charm = charm - self._external_url = external_url - self._relation_name = relation_name - self.framework.observe( - self._charm.on[relation_name].relation_joined, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_created, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_changed, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_broken, self._on_relation_broken_event - ) - - def _on_relation_broken_event(self, e: RelationBrokenEvent): - """Handle relation broken events.""" - self.on.broken.emit(e.relation) - - def _on_relation_event(self, e: RelationEvent): - """Handle relation created/joined/changed events.""" - if self.is_requirer_ready(e.relation): - self.on.request.emit(e.relation) - - def is_requirer_ready(self, relation: Relation): - """Attempt to determine if requirer has already populated app data.""" - try: - self._get_requested_protocols(relation) - except NotReadyError: - return False - return True - - @staticmethod - def _get_requested_protocols(relation: Relation): - app = relation.app - if not app: - raise NotReadyError("relation.app is None") - - try: - databag = TracingRequirerAppData.load(relation.data[app]) - except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"relation {relation} is not ready to talk tracing") - raise NotReadyError() - return databag.receivers - - def requested_protocols(self): - """All receiver protocols that have been requested by our related apps.""" - requested_protocols = set() - for relation in self.relations: - try: - protocols = self._get_requested_protocols(relation) - except NotReadyError: - continue - requested_protocols.update(protocols) - return requested_protocols - - @property - def relations(self) -> List[Relation]: - """All relations active on this endpoint.""" - return self._charm.model.relations[self._relation_name] - - def publish_receivers(self, receivers: Sequence[RawReceiver]): - """Let all requirers know that these receivers are active and listening.""" - if not self._charm.unit.is_leader(): - raise RuntimeError("only leader can do this") - - for relation in self.relations: - try: - TracingProviderAppData( - receivers=[ - Receiver( - url=url, - protocol=ProtocolType( - name=protocol, - type=receiver_protocol_to_transport_protocol[protocol], - ), - ) - for protocol, url in receivers - ], - ).dump(relation.data[self._charm.app]) - - except ModelError as e: - # args are bytes - msg = e.args[0] - if isinstance(msg, bytes): - if msg.startswith( - b"ERROR cannot read relation application settings: permission denied" - ): - logger.error( - f"encountered error {e} while attempting to update_relation_data." - f"The relation must be gone." 
- ) - continue - raise - - -class EndpointRemovedEvent(RelationBrokenEvent): - """Event representing a change in one of the receiver endpoints.""" - - -class EndpointChangedEvent(_AutoSnapshotEvent): - """Event representing a change in one of the receiver endpoints.""" - - __args__ = ("_receivers",) - - if TYPE_CHECKING: - _receivers = [] # type: List[dict] - - @property - def receivers(self) -> List[Receiver]: - """Cast receivers back from dict.""" - return [Receiver(**i) for i in self._receivers] - - -class TracingEndpointRequirerEvents(CharmEvents): - """TracingEndpointRequirer events.""" - - endpoint_changed = EventSource(EndpointChangedEvent) - endpoint_removed = EventSource(EndpointRemovedEvent) - - -class TracingEndpointRequirer(Object): - """A tracing endpoint for Tempo.""" - - on = TracingEndpointRequirerEvents() # type: ignore - - def __init__( - self, - charm: CharmBase, - relation_name: str = DEFAULT_RELATION_NAME, - protocols: Optional[List[ReceiverProtocol]] = None, - ): - """Construct a tracing requirer for a Tempo charm. - - If your application supports pushing traces to a distributed tracing backend, the - `TracingEndpointRequirer` object enables your charm to easily access endpoint information - exchanged over a `tracing` relation interface. - - Args: - charm: a `CharmBase` object that manages this - `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating - class. - relation_name: an optional string name of the relation between `charm` - and the Tempo charmed service. The default is "tracing". It is strongly - advised not to change the default, so that people deploying your charm will have a - consistent experience with all other charms that provide tracing endpoints. - protocols: optional list of protocols that the charm intends to send traces with. - The provider will enable receivers for these and only these protocols, - so be sure to enable all protocols the charm or its workload are going to need. - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. - RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the `tracing` relation - interface. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the `RelationRole.provides` - role. - """ - _validate_relation_by_interface_and_direction( - charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires - ) - - super().__init__(charm, relation_name) - - self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 - - self._charm = charm - self._relation_name = relation_name - - events = self._charm.on[self._relation_name] - self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) - self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) - - if protocols: - self.request_protocols(protocols) - - def request_protocols( - self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None - ): - """Publish the list of protocols which the provider should activate.""" - # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? - relations = [relation] if relation else self.relations - - if not protocols: - # empty sequence - raise ValueError( - "You need to pass a nonempty sequence of protocols to `request_protocols`." 
- ) - - try: - if self._charm.unit.is_leader(): - for relation in relations: - TracingRequirerAppData( - receivers=list(protocols), - ).dump(relation.data[self._charm.app]) - - except ModelError as e: - # args are bytes - msg = e.args[0] - if isinstance(msg, bytes): - if msg.startswith( - b"ERROR cannot read relation application settings: permission denied" - ): - logger.error( - f"encountered error {e} while attempting to request_protocols." - f"The relation must be gone." - ) - return - raise - - @property - def relations(self) -> List[Relation]: - """The tracing relations associated with this endpoint.""" - return self._charm.model.relations[self._relation_name] - - @property - def _relation(self) -> Optional[Relation]: - """If this wraps a single endpoint, the relation bound to it, if any.""" - if not self._is_single_endpoint: - objname = type(self).__name__ - raise AmbiguousRelationUsageError( - f"This {objname} wraps a {self._relation_name} endpoint that has " - "limit != 1. We can't determine what relation, of the possibly many, you are " - f"talking about. Please pass a relation instance while calling {objname}, " - "or set limit=1 in the charm metadata." - ) - relations = self.relations - return relations[0] if relations else None - - def is_ready(self, relation: Optional[Relation] = None): - """Is this endpoint ready?""" - relation = relation or self._relation - if not relation: - logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") - return False - if relation.data is None: - logger.error(f"relation data is None for {relation}") - return False - if not relation.app: - logger.error(f"{relation} event received but there is no relation.app") - return False - try: - databag = dict(relation.data[relation.app]) - TracingProviderAppData.load(databag) - - except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"failed validating relation data for {relation}") - return False - return True - - def _on_tracing_relation_changed(self, event): - """Notify the providers that there is new endpoint information available.""" - relation = event.relation - if not self.is_ready(relation): - self.on.endpoint_removed.emit(relation) # type: ignore - return - - data = TracingProviderAppData.load(relation.data[relation.app]) - self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore - - def _on_tracing_relation_broken(self, event: RelationBrokenEvent): - """Notify the providers that the endpoint is broken.""" - relation = event.relation - self.on.endpoint_removed.emit(relation) # type: ignore - - def get_all_endpoints( - self, relation: Optional[Relation] = None - ) -> Optional[TracingProviderAppData]: - """Unmarshalled relation data.""" - relation = relation or self._relation - if not self.is_ready(relation): - return - return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore - - def _get_endpoint( - self, relation: Optional[Relation], protocol: ReceiverProtocol - ) -> Optional[str]: - app_data = self.get_all_endpoints(relation) - if not app_data: - return None - receivers: List[Receiver] = list( - filter(lambda i: i.protocol.name == protocol, app_data.receivers) - ) - if not receivers: - logger.error(f"no receiver found with protocol={protocol!r}") - return - if len(receivers) > 1: - logger.error( - f"too many receivers with protocol={protocol!r}; using first one. 
Found: {receivers}" - ) - return - - receiver = receivers[0] - return receiver.url - - def get_endpoint( - self, protocol: ReceiverProtocol, relation: Optional[Relation] = None - ) -> Optional[str]: - """Receiver endpoint for the given protocol. - - It could happen that this function gets called before the provider publishes the endpoints. - In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to - restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. - - Raises: - ProtocolNotRequestedError: - If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. - """ - endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) - if not endpoint: - requested_protocols = set() - relations = [relation] if relation else self.relations - for relation in relations: - try: - databag = TracingRequirerAppData.load(relation.data[self._charm.app]) - except DataValidationError: - continue - - requested_protocols.update(databag.receivers) - - if protocol not in requested_protocols: - raise ProtocolNotRequestedError(protocol, relation) - - return None - return endpoint - - -def charm_tracing_config( - endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] -) -> Tuple[Optional[str], Optional[str]]: - """Return the charm_tracing config you likely want. - - If no endpoint is provided: - disable charm tracing. - If https endpoint is provided but cert_path is not found on disk: - disable charm tracing. - If https endpoint is provided and cert_path is None: - ERROR - Else: - proceed with charm tracing (with or without tls, as appropriate) - - Usage: - If you are using charm_tracing >= v1.9: - >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config - >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") - >>> class MyCharm(...): - >>> _cert_path = "/path/to/cert/on/charm/container.crt" - >>> def __init__(self, ...): - >>> self.tracing = TracingEndpointRequirer(...) - >>> self.my_endpoint, self.cert_path = charm_tracing_config( - ... self.tracing, self._cert_path) - - If you are using charm_tracing < v1.9: - >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config - >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") - >>> class MyCharm(...): - >>> _cert_path = "/path/to/cert/on/charm/container.crt" - >>> def __init__(self, ...): - >>> self.tracing = TracingEndpointRequirer(...) - >>> self._my_endpoint, self._cert_path = charm_tracing_config( - ... self.tracing, self._cert_path) - >>> @property - >>> def my_endpoint(self): - >>> return self._my_endpoint - >>> @property - >>> def cert_path(self): - >>> return self._cert_path - - """ - if not endpoint_requirer.is_ready(): - return None, None - - endpoint = endpoint_requirer.get_endpoint("otlp_http") - if not endpoint: - return None, None - - is_https = endpoint.startswith("https://") - - if is_https: - if cert_path is None or not Path(cert_path).exists(): - # disable charm tracing until we obtain a cert to prevent tls errors - logger.error( - "Tracing endpoint is https, but no server_cert has been passed." - "Please point @trace_charm to a `server_cert` attr. 
" - "This might also mean that the tracing provider is related to a " - "certificates provider, but this application is not (yet). " - "In that case, you might just have to wait a bit for the certificates " - "integration to settle. " - ) - return None, None - return endpoint, str(cert_path) - else: - return endpoint, None diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index 83b86b2..292dba0 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -57,9 +57,9 @@ config: type: string flask-secret-key-id: description: >- - This configuration is similar to `flask-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual Flask secret key. - To create the secret, run the following command: + This configuration is similar to `flask-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual Flask secret key. + To create the secret, run the following command: `juju add-secret my-flask-secret-key value= && juju grant-secret my-flask-secret-key flask-k8s`, and use the outputted secret ID to configure this option. type: secret @@ -130,10 +130,6 @@ requires: interface: rabbitmq optional: True limit: 1 - tracing: - interface: tracing - limit: 1 - optional: true resources: flask-app-image: diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py deleted file mode 100644 index e12ca2a..0000000 --- a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py +++ /dev/null @@ -1,1072 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2022 Canonical Ltd. -# See LICENSE file for licensing details. - -"""This charm library contains utilities to instrument your Charm with opentelemetry tracing data collection. - -(yes! charm code, not workload code!) - -This means that, if your charm is related to, for example, COS' Tempo charm, you will be able to inspect -in real time from the Grafana dashboard the execution flow of your charm. - -# Quickstart -Fetch the following charm libs (and ensure the minimum version/revision numbers are satisfied): - - charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing # >= 1.10 - charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.charm_tracing # >= 2.7 - -Then edit your charm code to include: - -```python -# import the necessary charm libs -from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer, charm_tracing_config -from charms.tempo_coordinator_k8s.v0.charm_tracing import charm_tracing - -# decorate your charm class with charm_tracing: -@charm_tracing( - # forward-declare the instance attributes that the instrumentor will look up to obtain the - # tempo endpoint and server certificate - tracing_endpoint="tracing_endpoint", - server_cert="server_cert" -) -class MyCharm(CharmBase): - _path_to_cert = "/path/to/cert.crt" - # path to cert file **in the charm container**. Its presence will be used to determine whether - # the charm is ready to use tls for encrypting charm traces. If your charm does not support tls, - # you can ignore this and pass None to charm_tracing_config. - # If you do support TLS, you'll need to make sure that the server cert is copied to this location - # and kept up to date so the instrumentor can use it. - - def __init__(self, ...): - ... - self.tracing = TracingEndpointRequirer(self, ...) 
- self.tracing_endpoint, self.server_cert = charm_tracing_config(self.tracing, self._path_to_cert) -``` - -# Detailed usage -To use this library, you need to do two things: -1) decorate your charm class with - -`@trace_charm(tracing_endpoint="my_tracing_endpoint")` - -2) add to your charm a "my_tracing_endpoint" (you can name this attribute whatever you like) -**property**, **method** or **instance attribute** that returns an otlp http/https endpoint url. -If you are using the ``charms.tempo_coordinator_k8s.v0.tracing.TracingEndpointRequirer`` as -``self.tracing = TracingEndpointRequirer(self)``, the implementation could be: - -``` - @property - def my_tracing_endpoint(self) -> Optional[str]: - '''Tempo endpoint for charm tracing''' - if self.tracing.is_ready(): - return self.tracing.get_endpoint("otlp_http") - else: - return None -``` - -At this point your charm will be automatically instrumented so that: -- charm execution starts a trace, containing - - every event as a span (including custom events) - - every charm method call (except dunders) as a span - -We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests -go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. - - -## TLS support -If your charm integrates with a TLS provider which is also trusted by the tracing provider (the Tempo charm), -you can configure ``charm_tracing`` to use TLS by passing a ``server_cert`` parameter to the decorator. - -If your charm is not trusting the same CA as the Tempo endpoint it is sending traces to, -you'll need to implement a cert-transfer relation to obtain the CA certificate from the same -CA that Tempo is using. - -For example: -``` -from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm -@trace_charm( - tracing_endpoint="my_tracing_endpoint", - server_cert="_server_cert" -) -class MyCharm(CharmBase): - self._server_cert = "/path/to/server.crt" - ... - - def on_tls_changed(self, e) -> Optional[str]: - # update the server cert on the charm container for charm tracing - Path(self._server_cert).write_text(self.get_server_cert()) - - def on_tls_broken(self, e) -> Optional[str]: - # remove the server cert so charm_tracing won't try to use tls anymore - Path(self._server_cert).unlink() -``` - - -## More fine-grained manual instrumentation -if you wish to add more spans to the trace, you can do so by getting a hold of the tracer like so: -``` -import opentelemetry -... -def get_tracer(self) -> opentelemetry.trace.Tracer: - return opentelemetry.trace.get_tracer(type(self).__name__) -``` - -By default, the tracer is named after the charm type. If you wish to override that, you can pass -a different ``service_name`` argument to ``trace_charm``. - -See the official opentelemetry Python SDK documentation for usage: -https://opentelemetry-python.readthedocs.io/en/latest/ - - -## Caching traces -The `trace_charm` machinery will buffer any traces collected during charm execution and store them -to a file on the charm container until a tracing backend becomes available. At that point, it will -flush them to the tracing receiver. 
- -By default, the buffer is configured to start dropping old traces if any of these conditions apply: - -- the storage size exceeds 10 MiB -- the number of buffered events exceeds 100 - -You can configure this by, for example: - -```python -@trace_charm( - tracing_endpoint="my_tracing_endpoint", - server_cert="_server_cert", - # only cache up to 42 events - buffer_max_events=42, - # only cache up to 42 MiB - buffer_max_size_mib=42, # minimum 10! -) -class MyCharm(CharmBase): - ... -``` - -Note that setting `buffer_max_events` to 0 will effectively disable the buffer. - -The path of the buffer file is by default in the charm's execution root, which for k8s charms means -that in case of pod churn, the cache will be lost. The recommended solution is to use an existing storage -(or add a new one) such as: - -```yaml -storage: - data: - type: filesystem - location: /charm-traces -``` - -and then configure the `@trace_charm` decorator to use it as path for storing the buffer: -```python -@trace_charm( - tracing_endpoint="my_tracing_endpoint", - server_cert="_server_cert", - # store traces to a PVC so they're not lost on pod restart. - buffer_path="/charm-traces/buffer.file", -) -class MyCharm(CharmBase): - ... -``` - -## Upgrading from `v0` - -If you are upgrading from `charm_tracing` v0, you need to take the following steps (assuming you already -have the newest version of the library in your charm): -1) If you need the dependency for your tests, add the following dependency to your charm project -(or, if your project had a dependency on `opentelemetry-exporter-otlp-proto-grpc` only because -of `charm_tracing` v0, you can replace it with): - -`opentelemetry-exporter-otlp-proto-http>=1.21.0`. - -2) Update the charm method referenced to from ``@trace`` and ``@trace_charm``, -to return from ``TracingEndpointRequirer.get_endpoint("otlp_http")`` instead of ``grpc_http``. -For example: - -``` - from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - - @trace_charm( - tracing_endpoint="my_tracing_endpoint", - ) - class MyCharm(CharmBase): - - ... - - @property - def my_tracing_endpoint(self) -> Optional[str]: - '''Tempo endpoint for charm tracing''' - if self.tracing.is_ready(): - return self.tracing.otlp_grpc_endpoint() # OLD API, DEPRECATED. - else: - return None -``` - -needs to be replaced with: - -``` - from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - - @trace_charm( - tracing_endpoint="my_tracing_endpoint", - ) - class MyCharm(CharmBase): - - ... - - @property - def my_tracing_endpoint(self) -> Optional[str]: - '''Tempo endpoint for charm tracing''' - if self.tracing.is_ready(): - return self.tracing.get_endpoint("otlp_http") # NEW API, use this. - else: - return None -``` - -3) If you were passing a certificate (str) using `server_cert`, you need to change it to -provide an *absolute* path to the certificate file instead. -""" -import typing - -from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import encode_spans -from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter - - -def _remove_stale_otel_sdk_packages(): - """Hack to remove stale opentelemetry sdk packages from the charm's python venv. - - See https://github.com/canonical/grafana-agent-operator/issues/146 and - https://bugs.launchpad.net/juju/+bug/2058335 for more context. 
This patch can be removed after - this juju issue is resolved and sufficient time has passed to expect most users of this library - have migrated to the patched version of juju. When this patch is removed, un-ignore rule E402 for this file in the pyproject.toml (see setting - [tool.ruff.lint.per-file-ignores] in pyproject.toml). - - This only has an effect if executed on an upgrade-charm event. - """ - # all imports are local to keep this function standalone, side-effect-free, and easy to revert later - import os - - if os.getenv("JUJU_DISPATCH_PATH") != "hooks/upgrade-charm": - return - - import logging - import shutil - from collections import defaultdict - - from importlib_metadata import distributions - - otel_logger = logging.getLogger("charm_tracing_otel_patcher") - otel_logger.debug("Applying _remove_stale_otel_sdk_packages patch on charm upgrade") - # group by name all distributions starting with "opentelemetry_" - otel_distributions = defaultdict(list) - for distribution in distributions(): - name = distribution._normalized_name # type: ignore - if name.startswith("opentelemetry_"): - otel_distributions[name].append(distribution) - - otel_logger.debug(f"Found {len(otel_distributions)} opentelemetry distributions") - - # If we have multiple distributions with the same name, remove any that have 0 associated files - for name, distributions_ in otel_distributions.items(): - if len(distributions_) <= 1: - continue - - otel_logger.debug(f"Package {name} has multiple ({len(distributions_)}) distributions.") - for distribution in distributions_: - if not distribution.files: # Not None or empty list - path = distribution._path # type: ignore - otel_logger.info(f"Removing empty distribution of {name} at {path}.") - shutil.rmtree(path) - - otel_logger.debug("Successfully applied _remove_stale_otel_sdk_packages patch. ") - - -# apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. -# it could be trouble if someone ever decides to implement their own tracer parallel to -# ours and before the charm has inited. We assume they won't. 
-_remove_stale_otel_sdk_packages() - -import functools -import inspect -import logging -import os -from contextlib import contextmanager -from contextvars import Context, ContextVar, copy_context -from pathlib import Path -from typing import Any, Callable, Generator, List, Optional, Sequence, Type, TypeVar, Union, cast - -import opentelemetry -import ops -from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter, SpanExportResult -from opentelemetry.trace import INVALID_SPAN, Tracer -from opentelemetry.trace import get_current_span as otlp_get_current_span -from opentelemetry.trace import ( - get_tracer, - get_tracer_provider, - set_span_in_context, - set_tracer_provider, -) -from ops.charm import CharmBase -from ops.framework import Framework - -# The unique Charmhub library identifier, never change it -LIBID = "01780f1e588c42c3976d26780fdf9b89" - -# Increment this major API version when introducing breaking changes -LIBAPI = 0 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version - -LIBPATCH = 4 - -PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] - -logger = logging.getLogger("tracing") -dev_logger = logging.getLogger("tracing-dev") - -# set this to 0 if you are debugging/developing this library source -dev_logger.setLevel(logging.ERROR) - -_CharmType = Type[CharmBase] # the type CharmBase and any subclass thereof -_C = TypeVar("_C", bound=_CharmType) -_T = TypeVar("_T", bound=type) -_F = TypeVar("_F", bound=Type[Callable]) -tracer: ContextVar[Tracer] = ContextVar("tracer") -_GetterType = Union[Callable[[_CharmType], Optional[str]], property] - -CHARM_TRACING_ENABLED = "CHARM_TRACING_ENABLED" -BUFFER_DEFAULT_CACHE_FILE_NAME = ".charm_tracing_buffer.raw" -# we store the buffer as raw otlp-native protobuf (bytes) since it's hard to serialize/deserialize it in -# any portable format. Json dumping is supported, but loading isn't. -# cfr: https://github.com/open-telemetry/opentelemetry-python/issues/1003 - -BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB = 10 -_BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN = 10 -BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH = 100 -_MiB_TO_B = 2**20 # megabyte to byte conversion rate -_OTLP_SPAN_EXPORTER_TIMEOUT = 1 -"""Timeout in seconds that the OTLP span exporter has to push traces to the backend.""" - - -class _Buffer: - """Handles buffering for spans emitted while no tracing backend is configured or available. - - Use the max_event_history_length_buffering param of @trace_charm to tune - the amount of memory that this will hog on your units. - - The buffer is formatted as a bespoke byte dump (protobuf limitation). - We cannot store them as json because that is not well-supported by the sdk - (see https://github.com/open-telemetry/opentelemetry-python/issues/3364). 
- """ - - _SPANSEP = b"__CHARM_TRACING_BUFFER_SPAN_SEP__" - - def __init__(self, db_file: Path, max_event_history_length: int, max_buffer_size_mib: int): - self._db_file = db_file - self._max_event_history_length = max_event_history_length - self._max_buffer_size_mib = max(max_buffer_size_mib, _BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN) - - # set by caller - self.exporter: Optional[OTLPSpanExporter] = None - - def save(self, spans: typing.Sequence[ReadableSpan]): - """Save the spans collected by this exporter to the cache file. - - This method should be as fail-safe as possible. - """ - if self._max_event_history_length < 1: - dev_logger.debug("buffer disabled: max history length < 1") - return - - current_history_length = len(self.load()) - new_history_length = current_history_length + len(spans) - if (diff := self._max_event_history_length - new_history_length) < 0: - self.drop(diff) - self._save(spans) - - def _serialize(self, spans: Sequence[ReadableSpan]) -> bytes: - # encode because otherwise we can't json-dump them - return encode_spans(spans).SerializeToString() - - def _save(self, spans: Sequence[ReadableSpan], replace: bool = False): - dev_logger.debug(f"saving {len(spans)} new spans to buffer") - old = [] if replace else self.load() - new = self._serialize(spans) - - try: - # if the buffer exceeds the size limit, we start dropping old spans until it does - - while len((new + self._SPANSEP.join(old))) > (self._max_buffer_size_mib * _MiB_TO_B): - if not old: - # if we've already dropped all spans and still we can't get under the - # size limit, we can't save this span - logger.error( - f"span exceeds total buffer size limit ({self._max_buffer_size_mib}MiB); " - f"buffering FAILED" - ) - return - - old = old[1:] - logger.warning( - f"buffer size exceeds {self._max_buffer_size_mib}MiB; dropping older spans... " - f"Please increase the buffer size, disable buffering, or ensure the spans can be flushed." - ) - - self._db_file.write_bytes(new + self._SPANSEP.join(old)) - except Exception: - logger.exception("error buffering spans") - - def load(self) -> List[bytes]: - """Load currently buffered spans from the cache file. - - This method should be as fail-safe as possible. - """ - if not self._db_file.exists(): - dev_logger.debug("buffer file not found. buffer empty.") - return [] - try: - spans = self._db_file.read_bytes().split(self._SPANSEP) - except Exception: - logger.exception(f"error parsing {self._db_file}") - return [] - return spans - - def drop(self, n_spans: Optional[int] = None): - """Drop some currently buffered spans from the cache file.""" - current = self.load() - if n_spans: - dev_logger.debug(f"dropping {n_spans} spans from buffer") - new = current[n_spans:] - else: - dev_logger.debug("emptying buffer") - new = [] - - self._db_file.write_bytes(self._SPANSEP.join(new)) - - def flush(self) -> Optional[bool]: - """Export all buffered spans to the given exporter, then clear the buffer. - - Returns whether the flush was successful, and None if there was nothing to flush. 
- """ - if not self.exporter: - dev_logger.debug("no exporter set; skipping buffer flush") - return False - - buffered_spans = self.load() - if not buffered_spans: - dev_logger.debug("nothing to flush; buffer empty") - return None - - errors = False - for span in buffered_spans: - try: - out = self.exporter._export(span) # type: ignore - if not (200 <= out.status_code < 300): - # take any 2xx status code as a success - errors = True - except ConnectionError: - dev_logger.debug( - "failed exporting buffered span; backend might be down or still starting" - ) - errors = True - except Exception: - logger.exception("unexpected error while flushing span batch from buffer") - errors = True - - if not errors: - self.drop() - else: - logger.error("failed flushing spans; buffer preserved") - return not errors - - @property - def is_empty(self): - """Utility to check whether the buffer has any stored spans. - - This is more efficient than attempting a load() given how large the buffer might be. - """ - return (not self._db_file.exists()) or (self._db_file.stat().st_size == 0) - - -class _OTLPSpanExporter(OTLPSpanExporter): - """Subclass of OTLPSpanExporter to configure the max retry timeout, so that it fails a bit faster.""" - - # The issue we're trying to solve is that the model takes AGES to settle if e.g. tls is misconfigured, - # as every hook of a charm_tracing-instrumented charm takes about a minute to exit, as the charm can't - # flush the traces and keeps retrying for 'too long' - - _MAX_RETRY_TIMEOUT = 4 - # we give the exporter 4 seconds in total to succeed pushing the traces to tempo - # if it fails, we'll be caching the data in the buffer and flush it the next time, so there's no data loss risk. - # this means 2/3 retries (hard to guess from the implementation) and up to ~7 seconds total wait - - -class _BufferedExporter(InMemorySpanExporter): - def __init__(self, buffer: _Buffer) -> None: - super().__init__() - self._buffer = buffer - - def export(self, spans: typing.Sequence[ReadableSpan]) -> SpanExportResult: - self._buffer.save(spans) - return super().export(spans) - - def force_flush(self, timeout_millis: int = 0) -> bool: - # parent implementation is fake, so the timeout_millis arg is not doing anything. - result = super().force_flush(timeout_millis) - self._buffer.save(self.get_finished_spans()) - return result - - -def is_enabled() -> bool: - """Whether charm tracing is enabled.""" - return os.getenv(CHARM_TRACING_ENABLED, "1") == "1" - - -@contextmanager -def charm_tracing_disabled(): - """Contextmanager to temporarily disable charm tracing. - - For usage in tests. - """ - previous = os.getenv(CHARM_TRACING_ENABLED, "1") - os.environ[CHARM_TRACING_ENABLED] = "0" - yield - os.environ[CHARM_TRACING_ENABLED] = previous - - -def get_current_span() -> Union[Span, None]: - """Return the currently active Span, if there is one, else None. - - If you'd rather keep your logic unconditional, you can use opentelemetry.trace.get_current_span, - which will return an object that behaves like a span but records no data. 
- """ - span = otlp_get_current_span() - if span is INVALID_SPAN: - return None - return cast(Span, span) - - -def _get_tracer_from_context(ctx: Context) -> Optional[ContextVar]: - tracers = [v for v in ctx if v is not None and v.name == "tracer"] - if tracers: - return tracers[0] - return None - - -def _get_tracer() -> Optional[Tracer]: - """Find tracer in context variable and as a fallback locate it in the full context.""" - try: - return tracer.get() - except LookupError: - # fallback: this course-corrects for a user error where charm_tracing symbols are imported - # from different paths (typically charms.tempo_coordinator_k8s... and lib.charms.tempo_coordinator_k8s...) - try: - ctx: Context = copy_context() - if context_tracer := _get_tracer_from_context(ctx): - logger.warning( - "Tracer not found in `tracer` context var. " - "Verify that you're importing all `charm_tracing` symbols from the same module path. \n" - "For example, DO" - ": `from charms.lib...charm_tracing import foo, bar`. \n" - "DONT: \n" - " \t - `from charms.lib...charm_tracing import foo` \n" - " \t - `from lib...charm_tracing import bar` \n" - "For more info: https://python-notes.curiousefficiency.org/en/latest/python" - "_concepts/import_traps.html#the-double-import-trap" - ) - return context_tracer.get() - else: - return None - except LookupError: - return None - - -@contextmanager -def _span(name: str) -> Generator[Optional[Span], Any, Any]: - """Context to create a span if there is a tracer, otherwise do nothing.""" - if tracer := _get_tracer(): - with tracer.start_as_current_span(name) as span: - yield cast(Span, span) - else: - yield None - - -class TracingError(RuntimeError): - """Base class for errors raised by this module.""" - - -class UntraceableObjectError(TracingError): - """Raised when an object you're attempting to instrument cannot be autoinstrumented.""" - - -def _get_tracing_endpoint( - tracing_endpoint_attr: str, - charm_instance: object, - charm_type: type, -): - _tracing_endpoint = getattr(charm_instance, tracing_endpoint_attr) - if callable(_tracing_endpoint): - tracing_endpoint = _tracing_endpoint() - else: - tracing_endpoint = _tracing_endpoint - - if tracing_endpoint is None: - return - - elif not isinstance(tracing_endpoint, str): - raise TypeError( - f"{charm_type.__name__}.{tracing_endpoint_attr} should resolve to a tempo endpoint (string); " - f"got {tracing_endpoint} instead." - ) - - dev_logger.debug(f"Setting up span exporter to endpoint: {tracing_endpoint}/v1/traces") - return f"{tracing_endpoint}/v1/traces" - - -def _get_server_cert( - server_cert_attr: str, - charm_instance: ops.CharmBase, - charm_type: Type[ops.CharmBase], -): - _server_cert = getattr(charm_instance, server_cert_attr) - if callable(_server_cert): - server_cert = _server_cert() - else: - server_cert = _server_cert - - if server_cert is None: - logger.warning( - f"{charm_type}.{server_cert_attr} is None; sending traces over INSECURE connection." - ) - return - elif not Path(server_cert).is_absolute(): - raise ValueError( - f"{charm_type}.{server_cert_attr} should resolve to a valid tls cert absolute path (string | Path)); " - f"got {server_cert} instead." 
- ) - return server_cert - - -def _setup_root_span_initializer( - charm_type: _CharmType, - tracing_endpoint_attr: str, - server_cert_attr: Optional[str], - service_name: Optional[str], - buffer_path: Optional[Path], - buffer_max_events: int, - buffer_max_size_mib: int, -): - """Patch the charm's initializer.""" - original_init = charm_type.__init__ - - @functools.wraps(original_init) - def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): - # we're using 'self' here because this is charm init code, makes sense to read what's below - # from the perspective of the charm. Self.unit.name... - - original_init(self, framework, *args, **kwargs) - # we call this from inside the init context instead of, say, _autoinstrument, because we want it to - # be checked on a per-charm-instantiation basis, not on a per-type-declaration one. - if not is_enabled(): - # this will only happen during unittesting, hopefully, so it's fine to log a - # bit more verbosely - logger.info("Tracing DISABLED: skipping root span initialization") - return - - original_event_context = framework._event_context - # default service name isn't just app name because it could conflict with the workload service name - _service_name = service_name or f"{self.app.name}-charm" - - unit_name = self.unit.name - resource = Resource.create( - attributes={ - "service.name": _service_name, - "compose_service": _service_name, - "charm_type": type(self).__name__, - # juju topology - "juju_unit": unit_name, - "juju_application": self.app.name, - "juju_model": self.model.name, - "juju_model_uuid": self.model.uuid, - } - ) - provider = TracerProvider(resource=resource) - - # if anything goes wrong with retrieving the endpoint, we let the exception bubble up. - tracing_endpoint = _get_tracing_endpoint(tracing_endpoint_attr, self, charm_type) - - buffer_only = False - # whether we're only exporting to buffer, or also to the otlp exporter. - - if not tracing_endpoint: - # tracing is off if tracing_endpoint is None - # however we can buffer things until tracing comes online - buffer_only = True - - server_cert: Optional[Union[str, Path]] = ( - _get_server_cert(server_cert_attr, self, charm_type) if server_cert_attr else None - ) - - if (tracing_endpoint and tracing_endpoint.startswith("https://")) and not server_cert: - logger.error( - "Tracing endpoint is https, but no server_cert has been passed." - "Please point @trace_charm to a `server_cert` attr. " - "This might also mean that the tracing provider is related to a " - "certificates provider, but this application is not (yet). " - "In that case, you might just have to wait a bit for the certificates " - "integration to settle. This span will be buffered." 
- ) - buffer_only = True - - buffer = _Buffer( - db_file=buffer_path or Path() / BUFFER_DEFAULT_CACHE_FILE_NAME, - max_event_history_length=buffer_max_events, - max_buffer_size_mib=buffer_max_size_mib, - ) - previous_spans_buffered = not buffer.is_empty - - exporters: List[SpanExporter] = [] - if buffer_only: - # we have to buffer because we're missing necessary backend configuration - dev_logger.debug("buffering mode: ON") - exporters.append(_BufferedExporter(buffer)) - - else: - dev_logger.debug("buffering mode: FALLBACK") - # in principle, we have the right configuration to be pushing traces, - # but if we fail for whatever reason, we will put everything in the buffer - # and retry the next time - otlp_exporter = _OTLPSpanExporter( - endpoint=tracing_endpoint, - certificate_file=str(Path(server_cert).absolute()) if server_cert else None, - timeout=_OTLP_SPAN_EXPORTER_TIMEOUT, # give individual requests 1 second to succeed - ) - exporters.append(otlp_exporter) - exporters.append(_BufferedExporter(buffer)) - buffer.exporter = otlp_exporter - - for exporter in exporters: - processor = BatchSpanProcessor(exporter) - provider.add_span_processor(processor) - - set_tracer_provider(provider) - _tracer = get_tracer(_service_name) # type: ignore - _tracer_token = tracer.set(_tracer) - - dispatch_path = os.getenv("JUJU_DISPATCH_PATH", "") # something like hooks/install - event_name = dispatch_path.split("/")[1] if "/" in dispatch_path else dispatch_path - root_span_name = f"{unit_name}: {event_name} event" - span = _tracer.start_span(root_span_name, attributes={"juju.dispatch_path": dispatch_path}) - - # all these shenanigans are to work around the fact that the opentelemetry tracing API is built - # on the assumption that spans will be used as contextmanagers. - # Since we don't (as we need to close the span on framework.commit), - # we need to manually set the root span as current. - ctx = set_span_in_context(span) - - # log a trace id, so we can pick it up from the logs (and jhack) to look it up in tempo. - root_trace_id = hex(span.get_span_context().trace_id)[2:] # strip 0x prefix - logger.debug(f"Starting root trace with id={root_trace_id!r}.") - - span_token = opentelemetry.context.attach(ctx) # type: ignore - - @contextmanager - def wrap_event_context(event_name: str): - dev_logger.debug(f"entering event context: {event_name}") - # when the framework enters an event context, we create a span. - with _span("event: " + event_name) as event_context_span: - if event_context_span: - # todo: figure out how to inject event attrs in here - event_context_span.add_event(event_name) - yield original_event_context(event_name) - - framework._event_context = wrap_event_context # type: ignore - - original_close = framework.close - - @functools.wraps(original_close) - def wrap_close(): - dev_logger.debug("tearing down tracer and flushing traces") - span.end() - opentelemetry.context.detach(span_token) # type: ignore - tracer.reset(_tracer_token) - tp = cast(TracerProvider, get_tracer_provider()) - flush_successful = tp.force_flush(timeout_millis=1000) # don't block for too long - - if buffer_only: - # if we're in buffer_only mode, it means we couldn't even set up the exporter for - # tempo as we're missing some data. 
- # so attempting to flush the buffer doesn't make sense - dev_logger.debug("tracing backend unavailable: all spans pushed to buffer") - - else: - dev_logger.debug("tracing backend found: attempting to flush buffer...") - - # if we do have an exporter for tempo, and we could send traces to it, - # we can attempt to flush the buffer as well. - if not flush_successful: - logger.error("flushing FAILED: unable to push traces to backend.") - else: - dev_logger.debug("flush succeeded.") - - # the backend has accepted the spans generated during this event, - if not previous_spans_buffered: - # if the buffer was empty to begin with, any spans we collected now can be discarded - buffer.drop() - dev_logger.debug("buffer dropped: this trace has been sent already") - else: - # if the buffer was nonempty, we can attempt to flush it - dev_logger.debug("attempting buffer flush...") - buffer_flush_successful = buffer.flush() - if buffer_flush_successful: - dev_logger.debug("buffer flush OK") - elif buffer_flush_successful is None: - # TODO is this even possible? - dev_logger.debug("buffer flush OK; empty: nothing to flush") - else: - # this situation is pretty weird, I'm not even sure it can happen, - # because it would mean that we did manage - # to push traces directly to the tempo exporter (flush_successful), - # but the buffer flush failed to push to the same exporter! - logger.error("buffer flush FAILED") - - tp.shutdown() - original_close() - - framework.close = wrap_close - return - - charm_type.__init__ = wrap_init # type: ignore - - -def trace_charm( - tracing_endpoint: str, - server_cert: Optional[str] = None, - service_name: Optional[str] = None, - extra_types: Sequence[type] = (), - buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, - buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, - buffer_path: Optional[Union[str, Path]] = None, -) -> Callable[[_T], _T]: - """Autoinstrument the decorated charm with tracing telemetry. - - Use this function to get out-of-the-box traces for all events emitted on this charm and all - method calls on instances of this class. - - Usage: - >>> from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - >>> from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer - >>> from ops import CharmBase - >>> - >>> @trace_charm( - >>> tracing_endpoint="tempo_otlp_http_endpoint", - >>> ) - >>> class MyCharm(CharmBase): - >>> - >>> def __init__(self, framework: Framework): - >>> ... - >>> self.tracing = TracingEndpointRequirer(self) - >>> - >>> @property - >>> def tempo_otlp_http_endpoint(self) -> Optional[str]: - >>> if self.tracing.is_ready(): - >>> return self.tracing.otlp_http_endpoint() - >>> else: - >>> return None - >>> - - :param tracing_endpoint: name of a method, property or attribute on the charm type that returns an - optional (fully resolvable) tempo url to which the charm traces will be pushed. - If None, tracing will be effectively disabled. - :param server_cert: name of a method, property or attribute on the charm type that returns an - optional absolute path to a CA certificate file to be used when sending traces to a remote server. - If it returns None, an _insecure_ connection will be used. To avoid errors in transient - situations where the endpoint is already https but there is no certificate on disk yet, it - is recommended to disable tracing (by returning None from the tracing_endpoint) altogether - until the cert has been written to disk. 
- :param service_name: service name tag to attach to all traces generated by this charm. - Defaults to the juju application name this charm is deployed under. - :param extra_types: pass any number of types that you also wish to autoinstrument. - For example, charm libs, relation endpoint wrappers, workload abstractions, ... - :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. - :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. - Minimum 10MiB. - :param buffer_path: path to buffer file to use for saving buffered spans. - """ - - def _decorator(charm_type: _T) -> _T: - """Autoinstrument the wrapped charmbase type.""" - _autoinstrument( - charm_type, - tracing_endpoint_attr=tracing_endpoint, - server_cert_attr=server_cert, - service_name=service_name, - extra_types=extra_types, - buffer_path=Path(buffer_path) if buffer_path else None, - buffer_max_size_mib=buffer_max_size_mib, - buffer_max_events=buffer_max_events, - ) - return charm_type - - return _decorator - - -def _autoinstrument( - charm_type: _T, - tracing_endpoint_attr: str, - server_cert_attr: Optional[str] = None, - service_name: Optional[str] = None, - extra_types: Sequence[type] = (), - buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, - buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, - buffer_path: Optional[Path] = None, -) -> _T: - """Set up tracing on this charm class. - - Use this function to get out-of-the-box traces for all events emitted on this charm and all - method calls on instances of this class. - - Usage: - - >>> from charms.tempo_coordinator_k8s.v0.charm_tracing import _autoinstrument - >>> from ops.main import main - >>> _autoinstrument( - >>> MyCharm, - >>> tracing_endpoint_attr="tempo_otlp_http_endpoint", - >>> service_name="MyCharm", - >>> extra_types=(Foo, Bar) - >>> ) - >>> main(MyCharm) - - :param charm_type: the CharmBase subclass to autoinstrument. - :param tracing_endpoint_attr: name of a method, property or attribute on the charm type that returns an - optional (fully resolvable) tempo url to which the charm traces will be pushed. - If None, tracing will be effectively disabled. - :param server_cert_attr: name of a method, property or attribute on the charm type that returns an - optional absolute path to a CA certificate file to be used when sending traces to a remote server. - If it returns None, an _insecure_ connection will be used. To avoid errors in transient - situations where the endpoint is already https but there is no certificate on disk yet, it - is recommended to disable tracing (by returning None from the tracing_endpoint) altogether - until the cert has been written to disk. - :param service_name: service name tag to attach to all traces generated by this charm. - Defaults to the juju application name this charm is deployed under. - :param extra_types: pass any number of types that you also wish to autoinstrument. - For example, charm libs, relation endpoint wrappers, workload abstractions, ... - :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. - :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. - Minimum 10MiB. - :param buffer_path: path to buffer file to use for saving buffered spans. 
- """ - dev_logger.debug(f"instrumenting {charm_type}") - _setup_root_span_initializer( - charm_type, - tracing_endpoint_attr, - server_cert_attr=server_cert_attr, - service_name=service_name, - buffer_path=buffer_path, - buffer_max_events=buffer_max_events, - buffer_max_size_mib=buffer_max_size_mib, - ) - trace_type(charm_type) - for type_ in extra_types: - trace_type(type_) - - return charm_type - - -def trace_type(cls: _T) -> _T: - """Set up tracing on this class. - - Use this decorator to get out-of-the-box traces for all method calls on instances of this class. - It assumes that this class is only instantiated after a charm type decorated with `@trace_charm` - has been instantiated. - """ - dev_logger.debug(f"instrumenting {cls}") - for name, method in inspect.getmembers(cls, predicate=inspect.isfunction): - dev_logger.debug(f"discovered {method}") - - if method.__name__.startswith("__"): - dev_logger.debug(f"skipping {method} (dunder)") - continue - - # the span title in the general case should be: - # method call: MyCharmWrappedMethods.b - # if the method has a name (functools.wrapped or regular method), let - # _trace_callable use its default algorithm to determine what name to give the span. - trace_method_name = None - try: - qualname_c0 = method.__qualname__.split(".")[0] - if not hasattr(cls, method.__name__): - # if the callable doesn't have a __name__ (probably a decorated method), - # it probably has a bad qualname too (such as my_decorator..wrapper) which is not - # great for finding out what the trace is about. So we use the method name instead and - # add a reference to the decorator name. Result: - # method call: @my_decorator(MyCharmWrappedMethods.b) - trace_method_name = f"@{qualname_c0}({cls.__name__}.{name})" - except Exception: # noqa: failsafe - pass - - new_method = trace_method(method, name=trace_method_name) - - if isinstance(inspect.getattr_static(cls, name), staticmethod): - new_method = staticmethod(new_method) - setattr(cls, name, new_method) - - return cls - - -def trace_method(method: _F, name: Optional[str] = None) -> _F: - """Trace this method. - - A span will be opened when this method is called and closed when it returns. - """ - return _trace_callable(method, "method", name=name) - - -def trace_function(function: _F, name: Optional[str] = None) -> _F: - """Trace this function. - - A span will be opened when this function is called and closed when it returns. - """ - return _trace_callable(function, "function", name=name) - - -def _trace_callable(callable: _F, qualifier: str, name: Optional[str] = None) -> _F: - dev_logger.debug(f"instrumenting {callable}") - - # sig = inspect.signature(callable) - @functools.wraps(callable) - def wrapped_function(*args, **kwargs): # type: ignore - name_ = name or getattr( - callable, "__qualname__", getattr(callable, "__name__", str(callable)) - ) - with _span(f"{qualifier} call: {name_}"): # type: ignore - return callable(*args, **kwargs) # type: ignore - - # wrapped_function.__signature__ = sig - return wrapped_function # type: ignore - - -def trace(obj: Union[Type, Callable]): - """Trace this object and send the resulting spans to Tempo. - - It will dispatch to ``trace_type`` if the decorated object is a class, otherwise - ``trace_function``. 
- """ - if isinstance(obj, type): - if issubclass(obj, CharmBase): - raise ValueError( - "cannot use @trace on CharmBase subclasses: use @trace_charm instead " - "(we need some arguments!)" - ) - return trace_type(obj) - else: - try: - return trace_function(obj) - except Exception: - raise UntraceableObjectError( - f"cannot create span from {type(obj)}; instrument {obj} manually." - ) diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py deleted file mode 100644 index 4516af6..0000000 --- a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ /dev/null @@ -1,998 +0,0 @@ -# Copyright 2024 Canonical Ltd. -# See LICENSE file for licensing details. -"""## Overview. - -This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a -tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm -may maintain the same interface and be backward compatible with all currently integrated charms. - -## Requirer Library Usage - -Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` -object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` -object only requires instantiating it, typically in the constructor of your charm. The -`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint - is exposed by the Tempo charm, and a list of protocols it intends to send traces with. - This relation must use the `tracing` interface. - The `TracingEndpointRequirer` object may be instantiated as follows - - from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer - - def __init__(self, *args): - super().__init__(*args) - # ... - self.tracing = TracingEndpointRequirer(self, - protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] - ) - # ... - -Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the -parent charm. - -Alternatively to providing the list of requested protocols at init time, the charm can do it at -any point in time by calling the -`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. -Using this method also allows you to use per-relation protocols. - -Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling -`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: -- `otlp_grpc` -- `otlp_http` -- `zipkin` -- `tempo` - -If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, -the library will raise an error. - -We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests -go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. - -## Provider Library Usage - -The `TracingEndpointProvider` object may be used by charms to manage relations with their -trace sources. For this purposes a Tempo-like charm needs to do two things - -1. Instantiate the `TracingEndpointProvider` object by providing it a -reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm -uses to interact with its trace sources. 
This relation must conform to the `tracing` interface -and it is strongly recommended that this relation be named `tracing` which is its -default value. - -For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as -follows - - from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider - - def __init__(self, *args): - super().__init__(*args) - # ... - self.tracing = TracingEndpointProvider(self) - # ... - - - -""" # noqa: W505 -import enum -import json -import logging -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, - List, - Literal, - MutableMapping, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -import pydantic -from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole -from ops.framework import EventSource, Object -from ops.model import ModelError, Relation -from pydantic import BaseModel, Field - -# The unique Charmhub library identifier, never change it -LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" - -# Increment this major API version when introducing breaking changes -LIBAPI = 0 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 3 - -PYDEPS = ["pydantic"] - -logger = logging.getLogger(__name__) - -DEFAULT_RELATION_NAME = "tracing" -RELATION_INTERFACE_NAME = "tracing" - -# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 -ReceiverProtocol = Literal[ - "zipkin", - "otlp_grpc", - "otlp_http", - "jaeger_grpc", - "jaeger_thrift_http", -] - -RawReceiver = Tuple[ReceiverProtocol, str] -"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), -(secured, if available) resolvable server url. -""" - -BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} - - -class TransportProtocolType(str, enum.Enum): - """Receiver Type.""" - - http = "http" - grpc = "grpc" - - -receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { - "zipkin": TransportProtocolType.http, - "otlp_grpc": TransportProtocolType.grpc, - "otlp_http": TransportProtocolType.http, - "jaeger_thrift_http": TransportProtocolType.http, - "jaeger_grpc": TransportProtocolType.grpc, -} -"""A mapping between telemetry protocols and their corresponding transport protocol. 
-""" - - -class TracingError(Exception): - """Base class for custom errors raised by this library.""" - - -class NotReadyError(TracingError): - """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" - - -class ProtocolNotRequestedError(TracingError): - """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" - - -class DataValidationError(TracingError): - """Raised when data validation fails on IPU relation data.""" - - -class AmbiguousRelationUsageError(TracingError): - """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" - - -if int(pydantic.version.VERSION.split(".")[0]) < 2: - - class DatabagModel(BaseModel): # type: ignore - """Base databag model.""" - - class Config: - """Pydantic config.""" - - # ignore any extra fields in the databag - extra = "ignore" - """Ignore any extra fields in the databag.""" - allow_population_by_field_name = True - """Allow instantiating this class by field name (instead of forcing alias).""" - - _NEST_UNDER = None - - @classmethod - def load(cls, databag: MutableMapping): - """Load this model from a Juju databag.""" - if cls._NEST_UNDER: - return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {f.alias for f in cls.__fields__.values()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. {databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.parse_raw(json.dumps(data)) # type: ignore - except pydantic.ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): - """Write the contents of this model to Juju databag. - - :param databag: the databag to write the data to. - :param clear: ensure the databag is cleared before writing it. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - - if self._NEST_UNDER: - databag[self._NEST_UNDER] = self.json(by_alias=True) - return databag - - dct = self.dict() - for key, field in self.__fields__.items(): # type: ignore - value = dct[key] - databag[field.alias or key] = json.dumps(value) - - return databag - -else: - from pydantic import ConfigDict - - class DatabagModel(BaseModel): - """Base databag model.""" - - model_config = ConfigDict( - # ignore any extra fields in the databag - extra="ignore", - # Allow instantiating this class by field name (instead of forcing alias). - populate_by_name=True, - # Custom config key: whether to nest the whole datastructure (as json) - # under a field or spread it out at the toplevel. - _NEST_UNDER=None, # type: ignore - ) - """Pydantic config.""" - - @classmethod - def load(cls, databag: MutableMapping): - """Load this model from a Juju databag.""" - nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore - if nest_under: - return cls.model_validate(json.loads(databag[nest_under])) # type: ignore - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.__fields__.items()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. 
{databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.model_validate_json(json.dumps(data)) # type: ignore - except pydantic.ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): - """Write the contents of this model to Juju databag. - - :param databag: the databag to write the data to. - :param clear: ensure the databag is cleared before writing it. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - nest_under = self.model_config.get("_NEST_UNDER") - if nest_under: - databag[nest_under] = self.model_dump_json( # type: ignore - by_alias=True, - # skip keys whose values are default - exclude_defaults=True, - ) - return databag - - dct = self.model_dump() # type: ignore - for key, field in self.model_fields.items(): # type: ignore - value = dct[key] - if value == field.default: - continue - databag[field.alias or key] = json.dumps(value) - - return databag - - -# todo use models from charm-relation-interfaces -if int(pydantic.version.VERSION.split(".")[0]) < 2: - - class ProtocolType(BaseModel): # type: ignore - """Protocol Type.""" - - class Config: - """Pydantic config.""" - - use_enum_values = True - """Allow serializing enum values.""" - - name: str = Field( - ..., - description="Receiver protocol name. What protocols are supported (and what they are called) " - "may differ per provider.", - examples=["otlp_grpc", "otlp_http", "tempo_http"], - ) - - type: TransportProtocolType = Field( - ..., - description="The transport protocol used by this receiver.", - examples=["http", "grpc"], - ) - -else: - - class ProtocolType(BaseModel): - """Protocol Type.""" - - model_config = ConfigDict( # type: ignore - # Allow serializing enum values. - use_enum_values=True - ) - """Pydantic config.""" - - name: str = Field( - ..., - description="Receiver protocol name. What protocols are supported (and what they are called) " - "may differ per provider.", - examples=["otlp_grpc", "otlp_http", "tempo_http"], - ) - - type: TransportProtocolType = Field( - ..., - description="The transport protocol used by this receiver.", - examples=["http", "grpc"], - ) - - -class Receiver(BaseModel): - """Specification of an active receiver.""" - - protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") - url: str = Field( - ..., - description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. - Otherwise, it would be the service's fqdn or internal IP. - If the protocol type is grpc, the url will not contain a scheme.""", - examples=[ - "http://traefik_address:2331", - "https://traefik_address:2331", - "http://tempo_public_ip:2331", - "https://tempo_public_ip:2331", - "tempo_public_ip:2331", - ], - ) - - -class TracingProviderAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing provider.""" - - receivers: List[Receiver] = Field( - ..., - description="List of all receivers enabled on the tracing provider.", - ) - - -class TracingRequirerAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing requirer.""" - - receivers: List[ReceiverProtocol] - """Requested receivers.""" - - -class _AutoSnapshotEvent(RelationEvent): - __args__: Tuple[str, ...] 
= () - __optional_kwargs__: Dict[str, Any] = {} - - @classmethod - def __attrs__(cls): - return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) - - def __init__(self, handle, relation, *args, **kwargs): - super().__init__(handle, relation) - - if not len(self.__args__) == len(args): - raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) - - for attr, obj in zip(self.__args__, args): - setattr(self, attr, obj) - for attr, default in self.__optional_kwargs__.items(): - obj = kwargs.get(attr, default) - setattr(self, attr, obj) - - def snapshot(self) -> dict: - dct = super().snapshot() - for attr in self.__attrs__(): - obj = getattr(self, attr) - try: - dct[attr] = obj - except ValueError as e: - raise ValueError( - "cannot automagically serialize {}: " - "override this method and do it " - "manually.".format(obj) - ) from e - - return dct - - def restore(self, snapshot: dict) -> None: - super().restore(snapshot) - for attr, obj in snapshot.items(): - setattr(self, attr, obj) - - -class RelationNotFoundError(Exception): - """Raised if no relation with the given name is found.""" - - def __init__(self, relation_name: str): - self.relation_name = relation_name - self.message = "No relation named '{}' found".format(relation_name) - super().__init__(self.message) - - -class RelationInterfaceMismatchError(Exception): - """Raised if the relation with the given name has an unexpected interface.""" - - def __init__( - self, - relation_name: str, - expected_relation_interface: str, - actual_relation_interface: str, - ): - self.relation_name = relation_name - self.expected_relation_interface = expected_relation_interface - self.actual_relation_interface = actual_relation_interface - self.message = ( - "The '{}' relation has '{}' as interface rather than the expected '{}'".format( - relation_name, actual_relation_interface, expected_relation_interface - ) - ) - - super().__init__(self.message) - - -class RelationRoleMismatchError(Exception): - """Raised if the relation with the given name has a different role than expected.""" - - def __init__( - self, - relation_name: str, - expected_relation_role: RelationRole, - actual_relation_role: RelationRole, - ): - self.relation_name = relation_name - self.expected_relation_interface = expected_relation_role - self.actual_relation_role = actual_relation_role - self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( - relation_name, repr(actual_relation_role), repr(expected_relation_role) - ) - - super().__init__(self.message) - - -def _validate_relation_by_interface_and_direction( - charm: CharmBase, - relation_name: str, - expected_relation_interface: str, - expected_relation_role: RelationRole, -): - """Validate a relation. - - Verifies that the `relation_name` provided: (1) exists in metadata.yaml, - (2) declares as interface the interface name passed as `relation_interface` - and (3) has the right "direction", i.e., it is a relation that `charm` - provides or requires. - - Args: - charm: a `CharmBase` object to scan for the matching relation. - relation_name: the name of the relation to be verified. - expected_relation_interface: the interface name to be matched by the - relation named `relation_name`. - expected_relation_role: whether the `relation_name` must be either - provided or required by `charm`. - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. 
- RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the same relation interface - as specified via the `expected_relation_interface` argument. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the same role as specified - via the `expected_relation_role` argument. - """ - if relation_name not in charm.meta.relations: - raise RelationNotFoundError(relation_name) - - relation = charm.meta.relations[relation_name] - - # fixme: why do we need to cast here? - actual_relation_interface = cast(str, relation.interface_name) - - if actual_relation_interface != expected_relation_interface: - raise RelationInterfaceMismatchError( - relation_name, expected_relation_interface, actual_relation_interface - ) - - if expected_relation_role is RelationRole.provides: - if relation_name not in charm.meta.provides: - raise RelationRoleMismatchError( - relation_name, RelationRole.provides, RelationRole.requires - ) - elif expected_relation_role is RelationRole.requires: - if relation_name not in charm.meta.requires: - raise RelationRoleMismatchError( - relation_name, RelationRole.requires, RelationRole.provides - ) - else: - raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) - - -class RequestEvent(RelationEvent): - """Event emitted when a remote requests a tracing endpoint.""" - - @property - def requested_receivers(self) -> List[ReceiverProtocol]: - """List of receiver protocols that have been requested.""" - relation = self.relation - app = relation.app - if not app: - raise NotReadyError("relation.app is None") - - return TracingRequirerAppData.load(relation.data[app]).receivers - - -class BrokenEvent(RelationBrokenEvent): - """Event emitted when a relation on tracing is broken.""" - - -class TracingEndpointProviderEvents(CharmEvents): - """TracingEndpointProvider events.""" - - request = EventSource(RequestEvent) - broken = EventSource(BrokenEvent) - - -class TracingEndpointProvider(Object): - """Class representing a trace receiver service.""" - - on = TracingEndpointProviderEvents() # type: ignore - - def __init__( - self, - charm: CharmBase, - external_url: Optional[str] = None, - relation_name: str = DEFAULT_RELATION_NAME, - ): - """Initialize. - - Args: - charm: a `CharmBase` instance that manages this instance of the Tempo service. - external_url: external address of the node hosting the tempo server, - if an ingress is present. - relation_name: an optional string name of the relation between `charm` - and the Tempo charmed service. The default is "tracing". - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. - RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the `tracing` relation - interface. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the `RelationRole.requires` - role. 
- """ - _validate_relation_by_interface_and_direction( - charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides - ) - - super().__init__(charm, relation_name + "tracing-provider") - self._charm = charm - self._external_url = external_url - self._relation_name = relation_name - self.framework.observe( - self._charm.on[relation_name].relation_joined, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_created, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_changed, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_broken, self._on_relation_broken_event - ) - - def _on_relation_broken_event(self, e: RelationBrokenEvent): - """Handle relation broken events.""" - self.on.broken.emit(e.relation) - - def _on_relation_event(self, e: RelationEvent): - """Handle relation created/joined/changed events.""" - if self.is_requirer_ready(e.relation): - self.on.request.emit(e.relation) - - def is_requirer_ready(self, relation: Relation): - """Attempt to determine if requirer has already populated app data.""" - try: - self._get_requested_protocols(relation) - except NotReadyError: - return False - return True - - @staticmethod - def _get_requested_protocols(relation: Relation): - app = relation.app - if not app: - raise NotReadyError("relation.app is None") - - try: - databag = TracingRequirerAppData.load(relation.data[app]) - except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"relation {relation} is not ready to talk tracing") - raise NotReadyError() - return databag.receivers - - def requested_protocols(self): - """All receiver protocols that have been requested by our related apps.""" - requested_protocols = set() - for relation in self.relations: - try: - protocols = self._get_requested_protocols(relation) - except NotReadyError: - continue - requested_protocols.update(protocols) - return requested_protocols - - @property - def relations(self) -> List[Relation]: - """All relations active on this endpoint.""" - return self._charm.model.relations[self._relation_name] - - def publish_receivers(self, receivers: Sequence[RawReceiver]): - """Let all requirers know that these receivers are active and listening.""" - if not self._charm.unit.is_leader(): - raise RuntimeError("only leader can do this") - - for relation in self.relations: - try: - TracingProviderAppData( - receivers=[ - Receiver( - url=url, - protocol=ProtocolType( - name=protocol, - type=receiver_protocol_to_transport_protocol[protocol], - ), - ) - for protocol, url in receivers - ], - ).dump(relation.data[self._charm.app]) - - except ModelError as e: - # args are bytes - msg = e.args[0] - if isinstance(msg, bytes): - if msg.startswith( - b"ERROR cannot read relation application settings: permission denied" - ): - logger.error( - f"encountered error {e} while attempting to update_relation_data." - f"The relation must be gone." 
- ) - continue - raise - - -class EndpointRemovedEvent(RelationBrokenEvent): - """Event representing a change in one of the receiver endpoints.""" - - -class EndpointChangedEvent(_AutoSnapshotEvent): - """Event representing a change in one of the receiver endpoints.""" - - __args__ = ("_receivers",) - - if TYPE_CHECKING: - _receivers = [] # type: List[dict] - - @property - def receivers(self) -> List[Receiver]: - """Cast receivers back from dict.""" - return [Receiver(**i) for i in self._receivers] - - -class TracingEndpointRequirerEvents(CharmEvents): - """TracingEndpointRequirer events.""" - - endpoint_changed = EventSource(EndpointChangedEvent) - endpoint_removed = EventSource(EndpointRemovedEvent) - - -class TracingEndpointRequirer(Object): - """A tracing endpoint for Tempo.""" - - on = TracingEndpointRequirerEvents() # type: ignore - - def __init__( - self, - charm: CharmBase, - relation_name: str = DEFAULT_RELATION_NAME, - protocols: Optional[List[ReceiverProtocol]] = None, - ): - """Construct a tracing requirer for a Tempo charm. - - If your application supports pushing traces to a distributed tracing backend, the - `TracingEndpointRequirer` object enables your charm to easily access endpoint information - exchanged over a `tracing` relation interface. - - Args: - charm: a `CharmBase` object that manages this - `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating - class. - relation_name: an optional string name of the relation between `charm` - and the Tempo charmed service. The default is "tracing". It is strongly - advised not to change the default, so that people deploying your charm will have a - consistent experience with all other charms that provide tracing endpoints. - protocols: optional list of protocols that the charm intends to send traces with. - The provider will enable receivers for these and only these protocols, - so be sure to enable all protocols the charm or its workload are going to need. - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. - RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the `tracing` relation - interface. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the `RelationRole.provides` - role. - """ - _validate_relation_by_interface_and_direction( - charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires - ) - - super().__init__(charm, relation_name) - - self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 - - self._charm = charm - self._relation_name = relation_name - - events = self._charm.on[self._relation_name] - self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) - self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) - - if protocols: - self.request_protocols(protocols) - - def request_protocols( - self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None - ): - """Publish the list of protocols which the provider should activate.""" - # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? - relations = [relation] if relation else self.relations - - if not protocols: - # empty sequence - raise ValueError( - "You need to pass a nonempty sequence of protocols to `request_protocols`." 
- ) - - try: - if self._charm.unit.is_leader(): - for relation in relations: - TracingRequirerAppData( - receivers=list(protocols), - ).dump(relation.data[self._charm.app]) - - except ModelError as e: - # args are bytes - msg = e.args[0] - if isinstance(msg, bytes): - if msg.startswith( - b"ERROR cannot read relation application settings: permission denied" - ): - logger.error( - f"encountered error {e} while attempting to request_protocols." - f"The relation must be gone." - ) - return - raise - - @property - def relations(self) -> List[Relation]: - """The tracing relations associated with this endpoint.""" - return self._charm.model.relations[self._relation_name] - - @property - def _relation(self) -> Optional[Relation]: - """If this wraps a single endpoint, the relation bound to it, if any.""" - if not self._is_single_endpoint: - objname = type(self).__name__ - raise AmbiguousRelationUsageError( - f"This {objname} wraps a {self._relation_name} endpoint that has " - "limit != 1. We can't determine what relation, of the possibly many, you are " - f"talking about. Please pass a relation instance while calling {objname}, " - "or set limit=1 in the charm metadata." - ) - relations = self.relations - return relations[0] if relations else None - - def is_ready(self, relation: Optional[Relation] = None): - """Is this endpoint ready?""" - relation = relation or self._relation - if not relation: - logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") - return False - if relation.data is None: - logger.error(f"relation data is None for {relation}") - return False - if not relation.app: - logger.error(f"{relation} event received but there is no relation.app") - return False - try: - databag = dict(relation.data[relation.app]) - TracingProviderAppData.load(databag) - - except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"failed validating relation data for {relation}") - return False - return True - - def _on_tracing_relation_changed(self, event): - """Notify the providers that there is new endpoint information available.""" - relation = event.relation - if not self.is_ready(relation): - self.on.endpoint_removed.emit(relation) # type: ignore - return - - data = TracingProviderAppData.load(relation.data[relation.app]) - self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore - - def _on_tracing_relation_broken(self, event: RelationBrokenEvent): - """Notify the providers that the endpoint is broken.""" - relation = event.relation - self.on.endpoint_removed.emit(relation) # type: ignore - - def get_all_endpoints( - self, relation: Optional[Relation] = None - ) -> Optional[TracingProviderAppData]: - """Unmarshalled relation data.""" - relation = relation or self._relation - if not self.is_ready(relation): - return - return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore - - def _get_endpoint( - self, relation: Optional[Relation], protocol: ReceiverProtocol - ) -> Optional[str]: - app_data = self.get_all_endpoints(relation) - if not app_data: - return None - receivers: List[Receiver] = list( - filter(lambda i: i.protocol.name == protocol, app_data.receivers) - ) - if not receivers: - logger.error(f"no receiver found with protocol={protocol!r}") - return - if len(receivers) > 1: - logger.error( - f"too many receivers with protocol={protocol!r}; using first one. 
Found: {receivers}" - ) - return - - receiver = receivers[0] - return receiver.url - - def get_endpoint( - self, protocol: ReceiverProtocol, relation: Optional[Relation] = None - ) -> Optional[str]: - """Receiver endpoint for the given protocol. - - It could happen that this function gets called before the provider publishes the endpoints. - In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to - restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. - - Raises: - ProtocolNotRequestedError: - If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. - """ - endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) - if not endpoint: - requested_protocols = set() - relations = [relation] if relation else self.relations - for relation in relations: - try: - databag = TracingRequirerAppData.load(relation.data[self._charm.app]) - except DataValidationError: - continue - - requested_protocols.update(databag.receivers) - - if protocol not in requested_protocols: - raise ProtocolNotRequestedError(protocol, relation) - - return None - return endpoint - - -def charm_tracing_config( - endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] -) -> Tuple[Optional[str], Optional[str]]: - """Return the charm_tracing config you likely want. - - If no endpoint is provided: - disable charm tracing. - If https endpoint is provided but cert_path is not found on disk: - disable charm tracing. - If https endpoint is provided and cert_path is None: - ERROR - Else: - proceed with charm tracing (with or without tls, as appropriate) - - Usage: - If you are using charm_tracing >= v1.9: - >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config - >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") - >>> class MyCharm(...): - >>> _cert_path = "/path/to/cert/on/charm/container.crt" - >>> def __init__(self, ...): - >>> self.tracing = TracingEndpointRequirer(...) - >>> self.my_endpoint, self.cert_path = charm_tracing_config( - ... self.tracing, self._cert_path) - - If you are using charm_tracing < v1.9: - >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm - >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config - >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") - >>> class MyCharm(...): - >>> _cert_path = "/path/to/cert/on/charm/container.crt" - >>> def __init__(self, ...): - >>> self.tracing = TracingEndpointRequirer(...) - >>> self._my_endpoint, self._cert_path = charm_tracing_config( - ... self.tracing, self._cert_path) - >>> @property - >>> def my_endpoint(self): - >>> return self._my_endpoint - >>> @property - >>> def cert_path(self): - >>> return self._cert_path - - """ - if not endpoint_requirer.is_ready(): - return None, None - - endpoint = endpoint_requirer.get_endpoint("otlp_http") - if not endpoint: - return None, None - - is_https = endpoint.startswith("https://") - - if is_https: - if cert_path is None or not Path(cert_path).exists(): - # disable charm tracing until we obtain a cert to prevent tls errors - logger.error( - "Tracing endpoint is https, but no server_cert has been passed." - "Please point @trace_charm to a `server_cert` attr. 
" - "This might also mean that the tracing provider is related to a " - "certificates provider, but this application is not (yet). " - "In that case, you might just have to wait a bit for the certificates " - "integration to settle. " - ) - return None, None - return endpoint, str(cert_path) - else: - return endpoint, None diff --git a/examples/flask/requirements.txt b/examples/flask/requirements.txt index e87af10..cd0f194 100644 --- a/examples/flask/requirements.txt +++ b/examples/flask/requirements.txt @@ -1,5 +1,4 @@ cosl -jsonschema +jsonschema >=4.19,<4.20 ops >= 2.6 -# pydantic==2.6.4 -https://github.com/canonical/paas-charm/archive/tempo-tracing.tar.gz +pydantic==2.6.4 diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index c658026..7d0b087 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -55,10 +55,6 @@ def __call__(self, *args: object, **kwargs: object) -> object: app = Flask(__name__) app.config.from_prefixed_env() -from opentelemetry.instrumentation.flask import FlaskInstrumentor - -FlaskInstrumentor().instrument_app(app) - broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") # Configure Celery only if Redis is configured celery_app = celery_init_app(app, broker_url) diff --git a/examples/flask/test_rock/requirements.txt b/examples/flask/test_rock/requirements.txt index dde3416..2ff69c0 100644 --- a/examples/flask/test_rock/requirements.txt +++ b/examples/flask/test_rock/requirements.txt @@ -8,12 +8,3 @@ redis[hiredis] boto3 pika celery -googleapis-common-protos -opentelemetry-api -opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http -opentelemetry-instrumentation -opentelemetry-instrumentation-flask -opentelemetry-instrumentation-wsgi -opentelemetry-sdk -opentelemetry-distro diff --git a/examples/flask/test_rock/rockcraft.yaml b/examples/flask/test_rock/rockcraft.yaml index 4c9565f..8dcdf56 100644 --- a/examples/flask/test_rock/rockcraft.yaml +++ b/examples/flask/test_rock/rockcraft.yaml @@ -25,10 +25,3 @@ services: startup: enabled user: _daemon_ working-dir: /flask/app - -parts: - flask-framework/dependencies: - override-stage: | - pip install opentelemetry-distro - craftctl default - opentelemetry-bootstrap diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index 7d74b4f..0273687 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -2,7 +2,6 @@ # See LICENSE file for licensing details. 
"""The base charm class for all charms.""" - import logging from paas_charm._gunicorn.webserver import GunicornWebserver, WebserverConfig From 99172b1d1a8deab120fa620c4632d8e42d655292 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 18 Dec 2024 10:52:20 +0300 Subject: [PATCH 10/70] chore(): More cleaning --- src/paas_charm/_gunicorn/webserver.py | 30 --------------------------- src/paas_charm/charm.py | 4 +--- 2 files changed, 1 insertion(+), 33 deletions(-) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 87853e9..854ed72 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -130,43 +130,13 @@ def _config(self) -> str: error_log = repr( APPLICATION_ERROR_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) - framework_environments = self._container.get_plan().to_dict()["services"][ - self._workload_config.framework - ]["environment"] - tracing_uri = None - tracing_service_name = None - if framework_environments.get("OTEL_EXPORTER_OTLP_ENDPOINT", None): - tracing_uri = framework_environments["OTEL_EXPORTER_OTLP_ENDPOINT"] - tracing_service_name = framework_environments["OTEL_SERVICE_NAME"] - # check if opentelemetry stuff are installed but not here. - # if they are installed then use them if not go into blocked state config = textwrap.dedent( f"""\ - from opentelemetry import trace - from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter - from opentelemetry.sdk.resources import Resource - from opentelemetry.sdk.trace import TracerProvider - from opentelemetry.sdk.trace.export import BatchSpanProcessor - bind = ['0.0.0.0:{self._workload_config.port}'] chdir = {repr(str(self._workload_config.app_dir))} accesslog = {access_log} errorlog = {error_log} statsd_host = {repr(STATSD_HOST)} - def post_fork(server, worker): - resource = Resource.create( - attributes={{ - "service.name": "{tracing_service_name}", - "worker": worker.pid, - }} - ) - trace.set_tracer_provider(TracerProvider(resource=resource)) - span_processor = BatchSpanProcessor( - OTLPSpanExporter( - endpoint="{tracing_uri}/v1/traces" - ) - ) - trace.get_tracer_provider().add_span_processor(span_processor) """ ) config += "\n".join(config_entries) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 2fd7287..3de26c6 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -347,9 +347,7 @@ def _gen_environment(self) -> dict[str, str]: Returns: A dictionary representing the application environment variables. """ - env = self._create_app().gen_environment() - - return env + return self._create_app().gen_environment() def _create_charm_state(self) -> CharmState: """Create charm state. 
From f9991150cd6532277261ac88309cf16c4ea12a6c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 18 Dec 2024 10:54:04 +0300 Subject: [PATCH 11/70] chore(): Remove empty line --- src/paas_charm/charm.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 3de26c6..fe2d6ed 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -374,7 +374,6 @@ def _create_charm_state(self) -> CharmState: endpoint=self._tracing.get_endpoint(protocol="otlp_http"), service_name=f"{self.framework.meta.name}-charm", ) - return CharmState.from_charm( config=config, framework=self._framework_name, From b3f845bb5a1c77e97045fa5b2563836fd4c3c2f1 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 18 Dec 2024 14:58:57 +0300 Subject: [PATCH 12/70] chore(): Add unit test for env variables. --- examples/flask/charmcraft.yaml | 4 + .../tempo_coordinator_k8s/v0/tracing.py | 998 ++++++++++++++++++ src/paas_charm/__init__.py | 7 - src/paas_charm/charm.py | 22 +- tests/unit/flask/test_tracing.py | 39 + 5 files changed, 1054 insertions(+), 16 deletions(-) create mode 100644 examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py create mode 100644 tests/unit/flask/test_tracing.py diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index 292dba0..190e83b 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -130,6 +130,10 @@ requires: interface: rabbitmq optional: True limit: 1 + tracing: + interface: tracing + optional: True + limit: 1 resources: flask-app-image: diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py new file mode 100644 index 0000000..4516af6 --- /dev/null +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -0,0 +1,998 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +"""## Overview. + +This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a +tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm +may maintain the same interface and be backward compatible with all currently integrated charms. + +## Requirer Library Usage + +Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` +object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` +object only requires instantiating it, typically in the constructor of your charm. The +`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint + is exposed by the Tempo charm, and a list of protocols it intends to send traces with. + This relation must use the `tracing` interface. + The `TracingEndpointRequirer` object may be instantiated as follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointRequirer(self, + protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] + ) + # ... + +Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the +parent charm. + +Alternatively to providing the list of requested protocols at init time, the charm can do it at +any point in time by calling the +`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. +Using this method also allows you to use per-relation protocols. 
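+
+For example, to request a protocol for a specific relation after init (an
+illustrative sketch; it assumes the endpoint is named "tracing" in the charm's
+metadata, as in the constructor example above):
+
+    self.tracing = TracingEndpointRequirer(self)
+    self.tracing.request_protocols(
+        ["otlp_http"], relation=self.model.get_relation("tracing")
+    )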
+ +Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling +`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: +- `otlp_grpc` +- `otlp_http` +- `zipkin` +- `tempo` + +If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, +the library will raise an error. + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + +## Provider Library Usage + +The `TracingEndpointProvider` object may be used by charms to manage relations with their +trace sources. For this purposes a Tempo-like charm needs to do two things + +1. Instantiate the `TracingEndpointProvider` object by providing it a +reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm +uses to interact with its trace sources. This relation must conform to the `tracing` interface +and it is strongly recommended that this relation be named `tracing` which is its +default value. + +For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as +follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointProvider(self) + # ... + + + +""" # noqa: W505 +import enum +import json +import logging +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import pydantic +from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole +from ops.framework import EventSource, Object +from ops.model import ModelError, Relation +from pydantic import BaseModel, Field + +# The unique Charmhub library identifier, never change it +LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 3 + +PYDEPS = ["pydantic"] + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "tracing" +RELATION_INTERFACE_NAME = "tracing" + +# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 +ReceiverProtocol = Literal[ + "zipkin", + "otlp_grpc", + "otlp_http", + "jaeger_grpc", + "jaeger_thrift_http", +] + +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + +BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} + + +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { + "zipkin": TransportProtocolType.http, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, + "jaeger_thrift_http": TransportProtocolType.http, + "jaeger_grpc": TransportProtocolType.grpc, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. 
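+
+For example (illustrative lookups against the mapping defined above):
+
+    receiver_protocol_to_transport_protocol["otlp_grpc"]  # TransportProtocolType.grpc
+    receiver_protocol_to_transport_protocol["otlp_http"]  # TransportProtocolType.http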
+""" + + +class TracingError(Exception): + """Base class for custom errors raised by this library.""" + + +class NotReadyError(TracingError): + """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" + + +class ProtocolNotRequestedError(TracingError): + """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" + + +class DataValidationError(TracingError): + """Raised when data validation fails on IPU relation data.""" + + +class AmbiguousRelationUsageError(TracingError): + """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" + + +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class DatabagModel(BaseModel): # type: ignore + """Base databag model.""" + + class Config: + """Pydantic config.""" + + # ignore any extra fields in the databag + extra = "ignore" + """Ignore any extra fields in the databag.""" + allow_population_by_field_name = True + """Allow instantiating this class by field name (instead of forcing alias).""" + + _NEST_UNDER = None + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + if cls._NEST_UNDER: + return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {f.alias for f in cls.__fields__.values()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.parse_raw(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + + if self._NEST_UNDER: + databag[self._NEST_UNDER] = self.json(by_alias=True) + return databag + + dct = self.dict() + for key, field in self.__fields__.items(): # type: ignore + value = dct[key] + databag[field.alias or key] = json.dumps(value) + + return databag + +else: + from pydantic import ConfigDict + + class DatabagModel(BaseModel): + """Base databag model.""" + + model_config = ConfigDict( + # ignore any extra fields in the databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, # type: ignore + ) + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) # type: ignore + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.__fields__.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. 
{databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( # type: ignore + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump() # type: ignore + for key, field in self.model_fields.items(): # type: ignore + value = dct[key] + if value == field.default: + continue + databag[field.alias or key] = json.dumps(value) + + return databag + + +# todo use models from charm-relation-interfaces +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" + + class Config: + """Pydantic config.""" + + use_enum_values = True + """Allow serializing enum values.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( # type: ignore + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + + +class Receiver(BaseModel): + """Specification of an active receiver.""" + + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. + If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" + + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) + + +class TracingRequirerAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing requirer.""" + + receivers: List[ReceiverProtocol] + """Requested receivers.""" + + +class _AutoSnapshotEvent(RelationEvent): + __args__: Tuple[str, ...] 
= () + __optional_kwargs__: Dict[str, Any] = {} + + @classmethod + def __attrs__(cls): + return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) + + def __init__(self, handle, relation, *args, **kwargs): + super().__init__(handle, relation) + + if not len(self.__args__) == len(args): + raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) + + for attr, obj in zip(self.__args__, args): + setattr(self, attr, obj) + for attr, default in self.__optional_kwargs__.items(): + obj = kwargs.get(attr, default) + setattr(self, attr, obj) + + def snapshot(self) -> dict: + dct = super().snapshot() + for attr in self.__attrs__(): + obj = getattr(self, attr) + try: + dct[attr] = obj + except ValueError as e: + raise ValueError( + "cannot automagically serialize {}: " + "override this method and do it " + "manually.".format(obj) + ) from e + + return dct + + def restore(self, snapshot: dict) -> None: + super().restore(snapshot) + for attr, obj in snapshot.items(): + setattr(self, attr, obj) + + +class RelationNotFoundError(Exception): + """Raised if no relation with the given name is found.""" + + def __init__(self, relation_name: str): + self.relation_name = relation_name + self.message = "No relation named '{}' found".format(relation_name) + super().__init__(self.message) + + +class RelationInterfaceMismatchError(Exception): + """Raised if the relation with the given name has an unexpected interface.""" + + def __init__( + self, + relation_name: str, + expected_relation_interface: str, + actual_relation_interface: str, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_interface + self.actual_relation_interface = actual_relation_interface + self.message = ( + "The '{}' relation has '{}' as interface rather than the expected '{}'".format( + relation_name, actual_relation_interface, expected_relation_interface + ) + ) + + super().__init__(self.message) + + +class RelationRoleMismatchError(Exception): + """Raised if the relation with the given name has a different role than expected.""" + + def __init__( + self, + relation_name: str, + expected_relation_role: RelationRole, + actual_relation_role: RelationRole, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_role + self.actual_relation_role = actual_relation_role + self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( + relation_name, repr(actual_relation_role), repr(expected_relation_role) + ) + + super().__init__(self.message) + + +def _validate_relation_by_interface_and_direction( + charm: CharmBase, + relation_name: str, + expected_relation_interface: str, + expected_relation_role: RelationRole, +): + """Validate a relation. + + Verifies that the `relation_name` provided: (1) exists in metadata.yaml, + (2) declares as interface the interface name passed as `relation_interface` + and (3) has the right "direction", i.e., it is a relation that `charm` + provides or requires. + + Args: + charm: a `CharmBase` object to scan for the matching relation. + relation_name: the name of the relation to be verified. + expected_relation_interface: the interface name to be matched by the + relation named `relation_name`. + expected_relation_role: whether the `relation_name` must be either + provided or required by `charm`. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. 
+ RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the same relation interface + as specified via the `expected_relation_interface` argument. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the same role as specified + via the `expected_relation_role` argument. + """ + if relation_name not in charm.meta.relations: + raise RelationNotFoundError(relation_name) + + relation = charm.meta.relations[relation_name] + + # fixme: why do we need to cast here? + actual_relation_interface = cast(str, relation.interface_name) + + if actual_relation_interface != expected_relation_interface: + raise RelationInterfaceMismatchError( + relation_name, expected_relation_interface, actual_relation_interface + ) + + if expected_relation_role is RelationRole.provides: + if relation_name not in charm.meta.provides: + raise RelationRoleMismatchError( + relation_name, RelationRole.provides, RelationRole.requires + ) + elif expected_relation_role is RelationRole.requires: + if relation_name not in charm.meta.requires: + raise RelationRoleMismatchError( + relation_name, RelationRole.requires, RelationRole.provides + ) + else: + raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) + + +class RequestEvent(RelationEvent): + """Event emitted when a remote requests a tracing endpoint.""" + + @property + def requested_receivers(self) -> List[ReceiverProtocol]: + """List of receiver protocols that have been requested.""" + relation = self.relation + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + return TracingRequirerAppData.load(relation.data[app]).receivers + + +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + +class TracingEndpointProviderEvents(CharmEvents): + """TracingEndpointProvider events.""" + + request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) + + +class TracingEndpointProvider(Object): + """Class representing a trace receiver service.""" + + on = TracingEndpointProviderEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + external_url: Optional[str] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ): + """Initialize. + + Args: + charm: a `CharmBase` instance that manages this instance of the Tempo service. + external_url: external address of the node hosting the tempo server, + if an ingress is present. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.requires` + role. 
+ """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides + ) + + super().__init__(charm, relation_name + "tracing-provider") + self._charm = charm + self._external_url = external_url + self._relation_name = relation_name + self.framework.observe( + self._charm.on[relation_name].relation_joined, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_created, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_changed, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) + + def _on_relation_event(self, e: RelationEvent): + """Handle relation created/joined/changed events.""" + if self.is_requirer_ready(e.relation): + self.on.request.emit(e.relation) + + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" + try: + self._get_requested_protocols(relation) + except NotReadyError: + return False + return True + + @staticmethod + def _get_requested_protocols(relation: Relation): + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + try: + databag = TracingRequirerAppData.load(relation.data[app]) + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"relation {relation} is not ready to talk tracing") + raise NotReadyError() + return databag.receivers + + def requested_protocols(self): + """All receiver protocols that have been requested by our related apps.""" + requested_protocols = set() + for relation in self.relations: + try: + protocols = self._get_requested_protocols(relation) + except NotReadyError: + continue + requested_protocols.update(protocols) + return requested_protocols + + @property + def relations(self) -> List[Relation]: + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] + + def publish_receivers(self, receivers: Sequence[RawReceiver]): + """Let all requirers know that these receivers are active and listening.""" + if not self._charm.unit.is_leader(): + raise RuntimeError("only leader can do this") + + for relation in self.relations: + try: + TracingProviderAppData( + receivers=[ + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers + ], + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to update_relation_data." + f"The relation must be gone." 
+ ) + continue + raise + + +class EndpointRemovedEvent(RelationBrokenEvent): + """Event representing a change in one of the receiver endpoints.""" + + +class EndpointChangedEvent(_AutoSnapshotEvent): + """Event representing a change in one of the receiver endpoints.""" + + __args__ = ("_receivers",) + + if TYPE_CHECKING: + _receivers = [] # type: List[dict] + + @property + def receivers(self) -> List[Receiver]: + """Cast receivers back from dict.""" + return [Receiver(**i) for i in self._receivers] + + +class TracingEndpointRequirerEvents(CharmEvents): + """TracingEndpointRequirer events.""" + + endpoint_changed = EventSource(EndpointChangedEvent) + endpoint_removed = EventSource(EndpointRemovedEvent) + + +class TracingEndpointRequirer(Object): + """A tracing endpoint for Tempo.""" + + on = TracingEndpointRequirerEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + protocols: Optional[List[ReceiverProtocol]] = None, + ): + """Construct a tracing requirer for a Tempo charm. + + If your application supports pushing traces to a distributed tracing backend, the + `TracingEndpointRequirer` object enables your charm to easily access endpoint information + exchanged over a `tracing` relation interface. + + Args: + charm: a `CharmBase` object that manages this + `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating + class. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". It is strongly + advised not to change the default, so that people deploying your charm will have a + consistent experience with all other charms that provide tracing endpoints. + protocols: optional list of protocols that the charm intends to send traces with. + The provider will enable receivers for these and only these protocols, + so be sure to enable all protocols the charm or its workload are going to need. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.provides` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires + ) + + super().__init__(charm, relation_name) + + self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[self._relation_name] + self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) + self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) + + if protocols: + self.request_protocols(protocols) + + def request_protocols( + self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None + ): + """Publish the list of protocols which the provider should activate.""" + # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? + relations = [relation] if relation else self.relations + + if not protocols: + # empty sequence + raise ValueError( + "You need to pass a nonempty sequence of protocols to `request_protocols`." 
+ ) + + try: + if self._charm.unit.is_leader(): + for relation in relations: + TracingRequirerAppData( + receivers=list(protocols), + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to request_protocols." + f"The relation must be gone." + ) + return + raise + + @property + def relations(self) -> List[Relation]: + """The tracing relations associated with this endpoint.""" + return self._charm.model.relations[self._relation_name] + + @property + def _relation(self) -> Optional[Relation]: + """If this wraps a single endpoint, the relation bound to it, if any.""" + if not self._is_single_endpoint: + objname = type(self).__name__ + raise AmbiguousRelationUsageError( + f"This {objname} wraps a {self._relation_name} endpoint that has " + "limit != 1. We can't determine what relation, of the possibly many, you are " + f"talking about. Please pass a relation instance while calling {objname}, " + "or set limit=1 in the charm metadata." + ) + relations = self.relations + return relations[0] if relations else None + + def is_ready(self, relation: Optional[Relation] = None): + """Is this endpoint ready?""" + relation = relation or self._relation + if not relation: + logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") + return False + if relation.data is None: + logger.error(f"relation data is None for {relation}") + return False + if not relation.app: + logger.error(f"{relation} event received but there is no relation.app") + return False + try: + databag = dict(relation.data[relation.app]) + TracingProviderAppData.load(databag) + + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"failed validating relation data for {relation}") + return False + return True + + def _on_tracing_relation_changed(self, event): + """Notify the providers that there is new endpoint information available.""" + relation = event.relation + if not self.is_ready(relation): + self.on.endpoint_removed.emit(relation) # type: ignore + return + + data = TracingProviderAppData.load(relation.data[relation.app]) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore + + def _on_tracing_relation_broken(self, event: RelationBrokenEvent): + """Notify the providers that the endpoint is broken.""" + relation = event.relation + self.on.endpoint_removed.emit(relation) # type: ignore + + def get_all_endpoints( + self, relation: Optional[Relation] = None + ) -> Optional[TracingProviderAppData]: + """Unmarshalled relation data.""" + relation = relation or self._relation + if not self.is_ready(relation): + return + return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore + + def _get_endpoint( + self, relation: Optional[Relation], protocol: ReceiverProtocol + ) -> Optional[str]: + app_data = self.get_all_endpoints(relation) + if not app_data: + return None + receivers: List[Receiver] = list( + filter(lambda i: i.protocol.name == protocol, app_data.receivers) + ) + if not receivers: + logger.error(f"no receiver found with protocol={protocol!r}") + return + if len(receivers) > 1: + logger.error( + f"too many receivers with protocol={protocol!r}; using first one. 
Found: {receivers}" + ) + return + + receiver = receivers[0] + return receiver.url + + def get_endpoint( + self, protocol: ReceiverProtocol, relation: Optional[Relation] = None + ) -> Optional[str]: + """Receiver endpoint for the given protocol. + + It could happen that this function gets called before the provider publishes the endpoints. + In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to + restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. + + Raises: + ProtocolNotRequestedError: + If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. + """ + endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) + if not endpoint: + requested_protocols = set() + relations = [relation] if relation else self.relations + for relation in relations: + try: + databag = TracingRequirerAppData.load(relation.data[self._charm.app]) + except DataValidationError: + continue + + requested_protocols.update(databag.receivers) + + if protocol not in requested_protocols: + raise ProtocolNotRequestedError(protocol, relation) + + return None + return endpoint + + +def charm_tracing_config( + endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] +) -> Tuple[Optional[str], Optional[str]]: + """Return the charm_tracing config you likely want. + + If no endpoint is provided: + disable charm tracing. + If https endpoint is provided but cert_path is not found on disk: + disable charm tracing. + If https endpoint is provided and cert_path is None: + ERROR + Else: + proceed with charm tracing (with or without tls, as appropriate) + + Usage: + If you are using charm_tracing >= v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self.my_endpoint, self.cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + + If you are using charm_tracing < v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self._my_endpoint, self._cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + >>> @property + >>> def my_endpoint(self): + >>> return self._my_endpoint + >>> @property + >>> def cert_path(self): + >>> return self._cert_path + + """ + if not endpoint_requirer.is_ready(): + return None, None + + endpoint = endpoint_requirer.get_endpoint("otlp_http") + if not endpoint: + return None, None + + is_https = endpoint.startswith("https://") + + if is_https: + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. 
" + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " + ) + return None, None + return endpoint, str(cert_path) + else: + return endpoint, None diff --git a/src/paas_charm/__init__.py b/src/paas_charm/__init__.py index 8fc5d13..2457d3c 100644 --- a/src/paas_charm/__init__.py +++ b/src/paas_charm/__init__.py @@ -58,10 +58,3 @@ raise exceptions.MissingCharmLibraryError( "Missing charm library, please run `charmcraft fetch-lib charms.redis_k8s.v0.redis`" ) from import_error -try: - import charms.tempo_coordinator_k8s.v0.tracing # noqa: F401 -except ImportError as import_error: - raise exceptions.MissingCharmLibraryError( - "Missing charm library, please run " - "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" - ) from import_error diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index fe2d6ed..dc03092 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -9,7 +9,6 @@ import ops from charms.data_platform_libs.v0.data_interfaces import DatabaseRequiresEvent from charms.redis_k8s.v0.redis import RedisRelationCharmEvents, RedisRequires -from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer from ops.model import Container from pydantic import BaseModel, ValidationError @@ -45,6 +44,15 @@ "Missing charm library, please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`" ) +try: + # pylint: disable=ungrouped-imports + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer +except ImportError: + logger.exception( + "Missing charm library, please run " + "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" + ) + class PaasCharm(abc.ABC, ops.CharmBase): # pylint: disable=too-many-instance-attributes """PaasCharm base charm service mixin. @@ -120,14 +128,10 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._rabbitmq = None if "tracing" in requires and requires["tracing"].interface_name == "tracing": - try: - self._tracing = TracingEndpointRequirer( - self, relation_name="tracing", protocols=["otlp_http"] - ) - # add self.framework.observe for relation changed and departed - except NameError: - self.update_app_and_unit_status(ops.BlockedStatus("Can not initialize tracing.")) - self._tracing = None + self._tracing = TracingEndpointRequirer( + self, relation_name="tracing", protocols=["otlp_http"] + ) + # add self.framework.observe for relation changed and departed else: self._tracing = None diff --git a/tests/unit/flask/test_tracing.py b/tests/unit/flask/test_tracing.py new file mode 100644 index 0000000..4b11c7e --- /dev/null +++ b/tests/unit/flask/test_tracing.py @@ -0,0 +1,39 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Flask charm tracing relation unit tests.""" + +import unittest.mock + +import ops +import pytest +from ops.testing import Harness + +from .constants import DEFAULT_LAYER, FLASK_CONTAINER_NAME + + +def test_tracing_relation(harness: Harness): + """ + arrange: Integrate the charm with the Tempo charm. + act: Run all initial hooks. + assert: The flask service should have the environment variable OTEL_EXPORTER_OTLP_ENDPOINT from + the tracing relation. It should also have the environment variable OTEL_SERVICE_NAME set to "flask-k8s-charm". 
+ """ + harness.set_model_name("flask-model") + harness.add_relation( + "tracing", + "tempo-coordinator", + app_data={ + "receivers": '[{"protocol": {"name": "otlp_http", "type": "http"}, "url": "http://test-ip:4318"}]' + }, + ) + container = harness.model.unit.get_container(FLASK_CONTAINER_NAME) + container.add_layer("a_layer", DEFAULT_LAYER) + + harness.begin_with_initial_hooks() + + assert harness.model.unit.status == ops.ActiveStatus() + service_env = container.get_plan().services["flask"].environment + print(service_env) + assert service_env["OTEL_EXPORTER_OTLP_ENDPOINT"] == "http://test-ip:4318" + assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s-charm" From ca89e47e86fb6f09edbee163ae204399c7d46de8 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 19 Dec 2024 12:55:14 +0300 Subject: [PATCH 13/70] chore(): Addressed comment --- src/paas_charm/app.py | 12 +- src/paas_charm/charm.py | 219 ++++++++++++++++++++++--------- src/paas_charm/charm_state.py | 194 +++++++++++++-------------- tests/unit/flask/test_tracing.py | 2 +- 4 files changed, 256 insertions(+), 171 deletions(-) diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index c892148..7ef822e 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -260,14 +260,10 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") - interface_envvars = _db_url_to_env_variables(interface_name.upper(), uri) env.update(interface_envvars) if integrations.tracing_relation_data: - env.update( - (k, v) - for k, v in ( - ("OTEL_SERVICE_NAME", integrations.tracing_relation_data.service_name), - ("OTEL_EXPORTER_OTLP_ENDPOINT", integrations.tracing_relation_data.endpoint), - ) - if v is not None - ) + if service_name := integrations.tracing_relation_data.service_name: + env.update({"OTEL_SERVICE_NAME": service_name}) + if endpoint := integrations.tracing_relation_data.endpoint: + env.update({"OTEL_EXPORTER_OTLP_ENDPOINT": endpoint}) if integrations.s3_parameters: s3 = integrations.s3_parameters diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index dc03092..018080f 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -10,6 +10,7 @@ from charms.data_platform_libs.v0.data_interfaces import DatabaseRequiresEvent from charms.redis_k8s.v0.redis import RedisRelationCharmEvents, RedisRequires from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer +from ops import RelationMeta from ops.model import Container from pydantic import BaseModel, ValidationError @@ -79,8 +80,6 @@ def _create_app(self) -> App: on = RedisRelationCharmEvents() - # pylint: disable=too-many-statements - # disabled because we have too many possible integrations for the workload. def __init__(self, framework: ops.Framework, framework_name: str) -> None: """Initialize the instance. 
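As a point of reference while reviewing the requirer changes below, this is a minimal sketch (not part of the patch) of how the provider application databag is parsed by the tracing library; the receiver URL is illustrative and matches the one used in the test_tracing.py unit test earlier in the series:

    from charms.tempo_coordinator_k8s.v0.tracing import TracingProviderAppData

    # Each databag value is a JSON-encoded string, which DatabagModel.load()
    # decodes before validating it against the pydantic model.
    databag = {
        "receivers": (
            '[{"protocol": {"name": "otlp_http", "type": "http"},'
            ' "url": "http://test-ip:4318"}]'
        )
    }
    data = TracingProviderAppData.load(databag)
    assert data.receivers[0].protocol.name == "otlp_http"
    assert data.receivers[0].url == "http://test-ip:4318"

This is the same parsing that backs `is_ready()` and `get_endpoint()` on the requirer side.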
@@ -93,47 +92,12 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._secret_storage = KeySecretStorage(charm=self, key=f"{framework_name}_secret_key") self._database_requirers = make_database_requirers(self, self.app.name) - requires = self.framework.meta.requires - if "redis" in requires and requires["redis"].interface_name == "redis": - self._redis = RedisRequires(charm=self, relation_name="redis") - self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) - else: - self._redis = None - - if "s3" in requires and requires["s3"].interface_name == "s3": - self._s3 = S3Requirer(charm=self, relation_name="s3", bucket_name=self.app.name) - self.framework.observe(self._s3.on.credentials_changed, self._on_s3_credential_changed) - self.framework.observe(self._s3.on.credentials_gone, self._on_s3_credential_gone) - else: - self._s3 = None - - if "saml" in requires and requires["saml"].interface_name == "saml": - self._saml = SamlRequires(self) - self.framework.observe(self._saml.on.saml_data_available, self._on_saml_data_available) - else: - self._saml = None - - self._rabbitmq: RabbitMQRequires | None - if "rabbitmq" in requires and requires["rabbitmq"].interface_name == "rabbitmq": - self._rabbitmq = RabbitMQRequires( - self, - "rabbitmq", - username=self.app.name, - vhost="/", - ) - self.framework.observe(self._rabbitmq.on.connected, self._on_rabbitmq_connected) - self.framework.observe(self._rabbitmq.on.ready, self._on_rabbitmq_ready) - self.framework.observe(self._rabbitmq.on.departed, self._on_rabbitmq_departed) - else: - self._rabbitmq = None - - if "tracing" in requires and requires["tracing"].interface_name == "tracing": - self._tracing = TracingEndpointRequirer( - self, relation_name="tracing", protocols=["otlp_http"] - ) - # add self.framework.observe for relation changed and departed - else: - self._tracing = None + requires: dict[str, RelationMeta] = self.framework.meta.requires + self._redis = self._init_redis(requires) + self._s3 = self._init_s3(requires) + self._saml = self._init_saml(requires) + self._rabbitmq = self._init_rabbitmq(requires) + self._tracing = self._init_tracing(requires) self._database_migration = DatabaseMigration( container=self.unit.get_container(self._workload_config.container_name), @@ -182,6 +146,93 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self.on[self._workload_config.container_name].pebble_ready, self._on_pebble_ready ) + def _init_redis(self, requires: dict[str, RelationMeta]) -> RedisRequires | None: + """Initialize the Redis relation if its required. + + Args: + requires: relation requires dictionary from metadata + + Returns: + Returns the Redis relation or None + """ + _redis = None + if "redis" in requires and requires["redis"].interface_name == "redis": + _redis = RedisRequires(charm=self, relation_name="redis") + self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) + return _redis + + def _init_s3(self, requires: dict[str, RelationMeta]) -> S3Requirer | None: + """Initialize the S3 relation if its required. 
+ + Args: + requires: relation requires dictionary from metadata + + Returns: + Returns the S3 relation or None + """ + _s3 = None + if "s3" in requires and requires["s3"].interface_name == "s3": + _s3 = S3Requirer(charm=self, relation_name="s3", bucket_name=self.app.name) + self.framework.observe(_s3.on.credentials_changed, self._on_s3_credential_changed) + self.framework.observe(_s3.on.credentials_gone, self._on_s3_credential_gone) + return _s3 + + def _init_saml(self, requires: dict[str, RelationMeta]) -> SamlRequires | None: + """Initialize the SAML relation if its required. + + Args: + requires: relation requires dictionary from metadata + + Returns: + Returns the SAML relation or None + """ + _saml = None + if "saml" in requires and requires["saml"].interface_name == "saml": + _saml = SamlRequires(self) + self.framework.observe(_saml.on.saml_data_available, self._on_saml_data_available) + return _saml + + def _init_rabbitmq(self, requires: dict[str, RelationMeta]) -> RabbitMQRequires | None: + """Initialize the RabbitMQ relation if its required. + + Args: + requires: relation requires dictionary from metadata + + Returns: + Returns the RabbitMQ relation or None + """ + _rabbitmq = None + if "rabbitmq" in requires and requires["rabbitmq"].interface_name == "rabbitmq": + _rabbitmq = RabbitMQRequires( + self, + "rabbitmq", + username=self.app.name, + vhost="/", + ) + self.framework.observe(_rabbitmq.on.connected, self._on_rabbitmq_connected) + self.framework.observe(_rabbitmq.on.ready, self._on_rabbitmq_ready) + self.framework.observe(_rabbitmq.on.departed, self._on_rabbitmq_departed) + + return _rabbitmq + + def _init_tracing(self, requires: dict[str, RelationMeta]) -> TracingEndpointRequirer | None: + """Initialize the Tracing relation if its required. + + Args: + requires: relation requires dictionary from metadata + + Returns: + Returns the Tracing relation or None + """ + _tracing = None + if "tracing" in requires and requires["tracing"].interface_name == "tracing": + _tracing = TracingEndpointRequirer( + self, relation_name="tracing", protocols=["otlp_http"] + ) + self.framework.observe(_tracing.on.endpoint_changed, self._on_tracing_relation_changed) + self.framework.observe(_tracing.on.endpoint_removed, self._on_tracing_relation_broken) + return _tracing + def get_framework_config(self) -> BaseModel: """Return the framework related configurations. @@ -273,7 +324,7 @@ def is_ready(self) -> bool: self.update_app_and_unit_status(ops.WaitingStatus("Waiting for peer integration")) return False - missing_integrations = self._missing_required_integrations(charm_state) + missing_integrations = list(self._missing_required_integrations(charm_state)) if missing_integrations: self._create_app().stop_all_services() self._database_migration.set_status_to_pending() @@ -284,43 +335,71 @@ def is_ready(self) -> bool: return True - # pylint: disable=too-many-branches - # disabled because we have too many possible integrations for the workload. - # Pending to refactor all integrations - def _missing_required_integrations(self, charm_state: CharmState) -> list[str]: # noqa: C901 - """Get list of missing integrations that are required. + def _missing_required_database_integrations( + self, requires: dict[str, RelationMeta], charm_state: CharmState + ) -> typing.Generator: + """Return required database integrations. 
Args: - charm_state: the charm state - - Returns: - list of names of missing integrations + requires: relation requires dictionary from metadata + charm_state: current charm state """ - missing_integrations = [] - requires = self.framework.meta.requires for name in self._database_requirers.keys(): if ( name not in charm_state.integrations.databases_uris or charm_state.integrations.databases_uris[name] is None ): if not requires[name].optional: - missing_integrations.append(name) + yield name + + if self._rabbitmq and not charm_state.integrations.rabbitmq_uri: + if not requires["rabbitmq"].optional: + yield "rabbitmq" + + def _missing_required_storage_integrations( + self, requires: dict[str, RelationMeta], charm_state: CharmState + ) -> typing.Generator: + """Return required storage integrations. + + Args: + requires: relation requires dictionary from metadata + charm_state: current charm state + """ if self._redis and not charm_state.integrations.redis_uri: if not requires["redis"].optional: - missing_integrations.append("redis") + yield "redis" if self._s3 and not charm_state.integrations.s3_parameters: if not requires["s3"].optional: - missing_integrations.append("s3") + yield "s3" + + def _missing_required_other_integrations( + self, requires: dict[str, RelationMeta], charm_state: CharmState + ) -> typing.Generator: + """Return required various integrations. + + Args: + requires: relation requires dictionary from metadata + charm_state: current charm state + """ if self._saml and not charm_state.integrations.saml_parameters: if not requires["saml"].optional: - missing_integrations.append("saml") - if self._rabbitmq and not charm_state.integrations.rabbitmq_uri: - if not requires["rabbitmq"].optional: - missing_integrations.append("rabbitmq") + yield "saml" if self._tracing and not charm_state.integrations.tracing_relation_data: if not requires["tracing"].optional: - missing_integrations.append("tracing") - return missing_integrations + yield "tracing" + + def _missing_required_integrations( + self, charm_state: CharmState + ) -> typing.Generator: # noqa: C901 + """Get list of missing integrations that are required. + + Args: + charm_state: the charm state + """ + requires = self.framework.meta.requires + yield from self._missing_required_database_integrations(requires, charm_state) + yield from self._missing_required_storage_integrations(requires, charm_state) + yield from self._missing_required_other_integrations(requires, charm_state) def restart(self, rerun_migrations: bool = False) -> None: """Restart or start the service if not started with the latest configuration. 
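A self-contained sketch of the generator composition used above; the class and boolean flags here are stand-ins for the charm's real integration state, not code from this patch:

    import typing


    class MissingIntegrationsExample:
        """Stand-in illustrating per-category checks composed with yield from."""

        def __init__(self, has_redis: bool, has_tracing: bool) -> None:
            self._has_redis = has_redis
            self._has_tracing = has_tracing

        def _missing_storage(self) -> typing.Generator[str, None, None]:
            if not self._has_redis:
                yield "redis"

        def _missing_other(self) -> typing.Generator[str, None, None]:
            if not self._has_tracing:
                yield "tracing"

        def missing_required_integrations(self) -> typing.Generator[str, None, None]:
            # Each category contributes only the names that are still missing.
            yield from self._missing_storage()
            yield from self._missing_other()


    # The caller materialises the generator, as is_ready() does with list(...).
    assert list(
        MissingIntegrationsExample(True, False).missing_required_integrations()
    ) == ["tracing"]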
@@ -376,7 +455,7 @@ def _create_charm_state(self) -> CharmState: if self._tracing and self._tracing.is_ready(): tracing_relation_data = TempoParameters( endpoint=self._tracing.get_endpoint(protocol="otlp_http"), - service_name=f"{self.framework.meta.name}-charm", + service_name=f"{self.framework.meta.name}-app", ) return CharmState.from_charm( config=config, @@ -503,3 +582,13 @@ def _on_rabbitmq_ready(self, _: ops.HookEvent) -> None: def _on_rabbitmq_departed(self, _: ops.HookEvent) -> None: """Handle rabbitmq departed event.""" self.restart() + + @block_if_invalid_config + def _on_tracing_relation_changed(self, _: ops.HookEvent) -> None: + """Handle tracing relation changed event.""" + self.restart() + + @block_if_invalid_config + def _on_tracing_relation_broken(self, _: ops.HookEvent) -> None: + """Handle tracing relation broken event.""" + self.restart() diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index e5f7318..5d83524 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -20,6 +20,95 @@ logger = logging.getLogger(__name__) +class TempoParameters(BaseModel): + """Configuration for accessing S3 bucket. + + Attributes: + endpoint: Tempo endpoint URL to send the traces. + service_name: Tempo service name for the workload. + """ + + endpoint: str | None = None + service_name: str | None = None + + +class S3Parameters(BaseModel): + """Configuration for accessing S3 bucket. + + Attributes: + access_key: AWS access key. + secret_key: AWS secret key. + region: The region to connect to the object storage. + storage_class: Storage Class for objects uploaded to the object storage. + bucket: The bucket name. + endpoint: The endpoint used to connect to the object storage. + path: The path inside the bucket to store objects. + s3_api_version: S3 protocol specific API signature. + s3_uri_style: The S3 protocol specific bucket path lookup type. Can be "path" or "host". + addressing_style: S3 protocol addressing style, can be "path" or "virtual". + attributes: The custom metadata (HTTP headers). + tls_ca_chain: The complete CA chain, which can be used for HTTPS validation. + """ + + access_key: str = Field(alias="access-key") + secret_key: str = Field(alias="secret-key") + region: Optional[str] = None + storage_class: Optional[str] = Field(alias="storage-class", default=None) + bucket: str + endpoint: Optional[str] = None + path: Optional[str] = None + s3_api_version: Optional[str] = Field(alias="s3-api-version", default=None) + s3_uri_style: Optional[str] = Field(alias="s3-uri-style", default=None) + tls_ca_chain: Optional[list[str]] = Field(alias="tls-ca-chain", default=None) + attributes: Optional[list[str]] = None + + @property + def addressing_style(self) -> Optional[str]: + """Translates s3_uri_style to AWS addressing_style.""" + if self.s3_uri_style == "host": + return "virtual" + # If None or "path", it does not change. + return self.s3_uri_style + + +class SamlParameters(BaseModel, extra=Extra.allow): + """Configuration for accessing SAML. + + Attributes: + entity_id: Entity Id of the SP. + metadata_url: URL for the metadata for the SP. + signing_certificate: Signing certificate for the SP. + single_sign_on_redirect_url: Sign on redirect URL for the SP. 
+ """ + + entity_id: str + metadata_url: str + signing_certificate: str = Field(alias="x509certs") + single_sign_on_redirect_url: str = Field(alias="single_sign_on_service_redirect_url") + + @field_validator("signing_certificate") + @classmethod + def validate_signing_certificate_exists(cls, certs: str, _: ValidationInfo) -> str: + """Validate that at least a certificate exists in the list of certificates. + + It is a prerequisite that the fist certificate is the signing certificate, + otherwise this method would return a wrong certificate. + + Args: + certs: Original x509certs field + + Returns: + The validated signing certificate + + Raises: + ValueError: If there is no certificate. + """ + certificate = certs.split(",")[0] + if not certificate: + raise ValueError("Missing x509certs. There should be at least one certificate.") + return certificate + + class ProxyConfig(BaseModel): """Configuration for network access through proxy. @@ -89,7 +178,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None = None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tracing_relation_data: "TempoParameters | None" = None, + tracing_relation_data: TempoParameters | None = None, base_url: str | None = None, ) -> "CharmState": """Initialize a new instance of the CharmState class from the associated charm. @@ -105,7 +194,7 @@ def from_charm( # pylint: disable=too-many-arguments saml_relation_data: Relation data from the SAML app. rabbitmq_uri: RabbitMQ uri. tracing_relation_data: The tracing uri provided by the Tempo coordinator charm - and charm name. + and charm name. base_url: Base URL for the service. Return: @@ -214,15 +303,15 @@ class IntegrationsState: saml_parameters: SAML parameters. rabbitmq_uri: RabbitMQ uri. tracing_relation_data: The tracing uri provided by the Tempo coordinator charm - and charm name. + and charm name. """ redis_uri: str | None = None databases_uris: dict[str, str] = field(default_factory=dict) - s3_parameters: "S3Parameters | None" = None - saml_parameters: "SamlParameters | None" = None + s3_parameters: S3Parameters | None = None + saml_parameters: SamlParameters | None = None rabbitmq_uri: str | None = None - tracing_relation_data: "TempoParameters | None" = None + tracing_relation_data: TempoParameters | None = None # This dataclass combines all the integrations, so it is reasonable that they stay together. @classmethod @@ -234,7 +323,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tracing_relation_data: "TempoParameters | None" = None, + tracing_relation_data: TempoParameters | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -247,7 +336,7 @@ def build( # pylint: disable=too-many-arguments saml_relation_data: Saml relation data from saml lib. rabbitmq_uri: RabbitMQ uri. tracing_relation_data: The tracing uri provided by the Tempo coordinator charm - and charm name. + and charm name. Return: The IntegrationsState instance created. @@ -297,92 +386,3 @@ def build( # pylint: disable=too-many-arguments rabbitmq_uri=rabbitmq_uri, tracing_relation_data=tracing_relation_data, ) - - -class TempoParameters(BaseModel): - """Configuration for accessing S3 bucket. - - Attributes: - endpoint: Tempo endpoint URL to send the traces. - service_name: Tempo service name for the workload. 
- """ - - endpoint: str | None = None - service_name: str | None = None - - -class S3Parameters(BaseModel): - """Configuration for accessing S3 bucket. - - Attributes: - access_key: AWS access key. - secret_key: AWS secret key. - region: The region to connect to the object storage. - storage_class: Storage Class for objects uploaded to the object storage. - bucket: The bucket name. - endpoint: The endpoint used to connect to the object storage. - path: The path inside the bucket to store objects. - s3_api_version: S3 protocol specific API signature. - s3_uri_style: The S3 protocol specific bucket path lookup type. Can be "path" or "host". - addressing_style: S3 protocol addressing style, can be "path" or "virtual". - attributes: The custom metadata (HTTP headers). - tls_ca_chain: The complete CA chain, which can be used for HTTPS validation. - """ - - access_key: str = Field(alias="access-key") - secret_key: str = Field(alias="secret-key") - region: Optional[str] = None - storage_class: Optional[str] = Field(alias="storage-class", default=None) - bucket: str - endpoint: Optional[str] = None - path: Optional[str] = None - s3_api_version: Optional[str] = Field(alias="s3-api-version", default=None) - s3_uri_style: Optional[str] = Field(alias="s3-uri-style", default=None) - tls_ca_chain: Optional[list[str]] = Field(alias="tls-ca-chain", default=None) - attributes: Optional[list[str]] = None - - @property - def addressing_style(self) -> Optional[str]: - """Translates s3_uri_style to AWS addressing_style.""" - if self.s3_uri_style == "host": - return "virtual" - # If None or "path", it does not change. - return self.s3_uri_style - - -class SamlParameters(BaseModel, extra=Extra.allow): - """Configuration for accessing SAML. - - Attributes: - entity_id: Entity Id of the SP. - metadata_url: URL for the metadata for the SP. - signing_certificate: Signing certificate for the SP. - single_sign_on_redirect_url: Sign on redirect URL for the SP. - """ - - entity_id: str - metadata_url: str - signing_certificate: str = Field(alias="x509certs") - single_sign_on_redirect_url: str = Field(alias="single_sign_on_service_redirect_url") - - @field_validator("signing_certificate") - @classmethod - def validate_signing_certificate_exists(cls, certs: str, _: ValidationInfo) -> str: - """Validate that at least a certificate exists in the list of certificates. - - It is a prerequisite that the fist certificate is the signing certificate, - otherwise this method would return a wrong certificate. - - Args: - certs: Original x509certs field - - Returns: - The validated signing certificate - - Raises: - ValueError: If there is no certificate. - """ - certificate = certs.split(",")[0] - if not certificate: - raise ValueError("Missing x509certs. There should be at least one certificate.") - return certificate diff --git a/tests/unit/flask/test_tracing.py b/tests/unit/flask/test_tracing.py index 4b11c7e..9403571 100644 --- a/tests/unit/flask/test_tracing.py +++ b/tests/unit/flask/test_tracing.py @@ -36,4 +36,4 @@ def test_tracing_relation(harness: Harness): service_env = container.get_plan().services["flask"].environment print(service_env) assert service_env["OTEL_EXPORTER_OTLP_ENDPOINT"] == "http://test-ip:4318" - assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s-charm" + assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s-app" From 259648dbc85da61a664c7571b066f0008b0df617 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 6 Jan 2025 08:49:47 +0300 Subject: [PATCH 14/70] chore(): Modify examples to test out tracing. 
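For context, a minimal sketch (not part of this patch) of how the TempoParameters model introduced in the previous patch is expected to surface as OpenTelemetry environment variables for the workload; the endpoint value and the import path are assumptions based on the earlier diffs:

    from paas_charm.charm_state import TempoParameters

    # Hypothetical values: the endpoint would come from
    # TracingEndpointRequirer.get_endpoint(protocol="otlp_http") and the
    # service name from f"{charm_name}-app".
    params = TempoParameters(
        endpoint="http://tempo-host:4318",
        service_name="flask-k8s-app",
    )

    env: dict[str, str] = {}
    if params.service_name:
        env["OTEL_SERVICE_NAME"] = params.service_name
    if params.endpoint:
        env["OTEL_EXPORTER_OTLP_ENDPOINT"] = params.endpoint

    assert env == {
        "OTEL_SERVICE_NAME": "flask-k8s-app",
        "OTEL_EXPORTER_OTLP_ENDPOINT": "http://tempo-host:4318",
    }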
--- examples/fastapi/app.py | 8 +++++- examples/fastapi/charm/charmcraft.yaml | 4 +++ examples/fastapi/requirements.txt | 3 ++ examples/flask/test_rock/app.py | 35 +++++++++++++++++++++++ examples/flask/test_rock/requirements.txt | 8 ++++++ src/paas_charm/_gunicorn/webserver.py | 31 ++++++++++++++++++++ 6 files changed, 88 insertions(+), 1 deletion(-) diff --git a/examples/fastapi/app.py b/examples/fastapi/app.py index 5f46f85..7b7bb1a 100644 --- a/examples/fastapi/app.py +++ b/examples/fastapi/app.py @@ -7,8 +7,13 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker +from opentelemetry import trace +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor app = FastAPI() + +FastAPIInstrumentor.instrument_app(app) +tracer = trace.get_tracer(__name__) engine = create_engine(os.environ["POSTGRESQL_DB_CONNECT_STRING"], echo=True) Session = scoped_session(sessionmaker(bind=engine)) @@ -26,7 +31,8 @@ class User(Base): @app.get("/") async def root(): - return "Hello, World!" + with tracer.start_as_current_span("custom-span"): + return "Hello, World!" @app.get("/env/user-defined-config") diff --git a/examples/fastapi/charm/charmcraft.yaml b/examples/fastapi/charm/charmcraft.yaml index fc19128..fc5d4df 100644 --- a/examples/fastapi/charm/charmcraft.yaml +++ b/examples/fastapi/charm/charmcraft.yaml @@ -79,6 +79,10 @@ requires: interface: postgresql_client optional: True limit: 1 + tracing: + interface: tracing + optional: True + limit: 1 resources: app-image: description: FastAPI application image. diff --git a/examples/fastapi/requirements.txt b/examples/fastapi/requirements.txt index a697414..9af200c 100644 --- a/examples/fastapi/requirements.txt +++ b/examples/fastapi/requirements.txt @@ -2,3 +2,6 @@ fastapi SQLAlchemy alembic psycopg2-binary +opentelemetry-api +opentelemetry-sdk +opentelemetry-instrumentation-fastapi diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index 7d0b087..4ce86f1 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -21,6 +21,8 @@ from celery import Celery, Task from flask import Flask, g, jsonify, request +from opentelemetry import trace +from opentelemetry.instrumentation.flask import FlaskInstrumentor def hostname(): """Get the hostname of the current machine.""" @@ -55,12 +57,45 @@ def __call__(self, *args: object, **kwargs: object) -> object: app = Flask(__name__) app.config.from_prefixed_env() +FlaskInstrumentor().instrument_app(app) + +tracer = trace.get_tracer(__name__) + broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") # Configure Celery only if Redis is configured celery_app = celery_init_app(app, broker_url) redis_client = redis.Redis.from_url(broker_url) if broker_url else None +def fib_slow(n): + if n <= 1: + return n + return fib_slow(n - 1) + fib_fast(n - 2) + + +def fib_fast(n): + nth_fib = [0] * (n + 2) + nth_fib[1] = 1 + for i in range(2, n + 1): + nth_fib[i] = nth_fib[i - 1] + nth_fib[i - 2] + return nth_fib[n] + + +@application.route("/fibonacci") +def fibonacci(): + n = int(request.args.get("n", 1)) + with tracer.start_as_current_span("root"): + with tracer.start_as_current_span("fib_slow") as slow_span: + ans = fib_slow(n) + slow_span.set_attribute("n", n) + slow_span.set_attribute("nth_fibonacci", ans) + with tracer.start_as_current_span("fib_fast") as fast_span: + ans = fib_fast(n) + fast_span.set_attribute("n", n) + fast_span.set_attribute("nth_fibonacci", ans) + + return f"F({n}) is: ({ans})" + 
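The nested spans above should show up in Tempo as one "root" span with "fib_slow" and "fib_fast" children. A possible way to exercise the route once the rock is running (host and port are assumptions; note that the decorator above uses `application`, while the Flask instance in this module is named `app`, so the name may need adjusting):

    # Illustrative only: host and port are assumptions for a local run of the rock.
    import requests

    response = requests.get("http://localhost:8000/fibonacci", params={"n": 10})
    print(response.text)  # expected: "F(10) is: (55)"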
@celery_app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): """Set up periodic tasks in the scheduler.""" diff --git a/examples/flask/test_rock/requirements.txt b/examples/flask/test_rock/requirements.txt index 2ff69c0..9966cad 100644 --- a/examples/flask/test_rock/requirements.txt +++ b/examples/flask/test_rock/requirements.txt @@ -8,3 +8,11 @@ redis[hiredis] boto3 pika celery +opentelemetry-api +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http +opentelemetry-instrumentation +opentelemetry-instrumentation-flask +opentelemetry-instrumentation-wsgi +opentelemetry-sdk +opentelemetry-distro diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 854ed72..a9828d6 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -139,6 +139,37 @@ def _config(self) -> str: statsd_host = {repr(STATSD_HOST)} """ ) + framework_environments = self._container.get_plan().to_dict()['services'][self._workload_config.framework]['environment'] + tracing_uri = None + tracing_service_name = None + if framework_environments.get('OTEL_EXPORTER_OTLP_ENDPOINT', None): + tracing_endpoint = framework_environments['OTEL_EXPORTER_OTLP_ENDPOINT'] + tracing_service_name = framework_environments['OTEL_SERVICE_NAME'] + + config += textwrap.dedent( + f"""\ + from opentelemetry import trace + from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + from opentelemetry.sdk.resources import Resource + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import BatchSpanProcessor + + def post_fork(server, worker): + resource = Resource.create( + attributes={{ + "service.name": "{tracing_service_name}", + "worker": worker.pid, + }} + ) + trace.set_tracer_provider(TracerProvider(resource=resource)) + span_processor = BatchSpanProcessor( + OTLPSpanExporter( + endpoint="{tracing_endpoint}/v1/traces" + ) + ) + trace.get_tracer_provider().add_span_processor(span_processor) + """ + ) config += "\n".join(config_entries) return config From 6cda4e533cf94badb0bd8317c213ab0b08310990 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 6 Jan 2025 08:53:50 +0300 Subject: [PATCH 15/70] chore(lint): Format code --- examples/fastapi/app.py | 4 ++-- examples/flask/test_rock/app.py | 13 +++++++------ src/paas_charm/_gunicorn/webserver.py | 12 +++++++----- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/examples/fastapi/app.py b/examples/fastapi/app.py index 7b7bb1a..d27d337 100644 --- a/examples/fastapi/app.py +++ b/examples/fastapi/app.py @@ -3,12 +3,12 @@ import os from fastapi import FastAPI, HTTPException +from opentelemetry import trace +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor from sqlalchemy import Column, Integer, String, create_engine, inspect from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker -from opentelemetry import trace -from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor app = FastAPI() diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index 4ce86f1..03da5ef 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -20,10 +20,10 @@ import redis from celery import Celery, Task from flask import Flask, g, jsonify, request - from opentelemetry import trace from opentelemetry.instrumentation.flask import FlaskInstrumentor + def hostname(): """Get the hostname of the current 
machine.""" return socket.gethostbyname(socket.gethostname()) @@ -86,15 +86,16 @@ def fibonacci(): n = int(request.args.get("n", 1)) with tracer.start_as_current_span("root"): with tracer.start_as_current_span("fib_slow") as slow_span: - ans = fib_slow(n) + answer = fib_slow(n) slow_span.set_attribute("n", n) - slow_span.set_attribute("nth_fibonacci", ans) + slow_span.set_attribute("nth_fibonacci", answer) with tracer.start_as_current_span("fib_fast") as fast_span: - ans = fib_fast(n) + answer = fib_fast(n) fast_span.set_attribute("n", n) - fast_span.set_attribute("nth_fibonacci", ans) + fast_span.set_attribute("nth_fibonacci", answer) + + return f"F({n}) is: ({answer})" - return f"F({n}) is: ({ans})" @celery_app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index a9828d6..c636a25 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -139,12 +139,14 @@ def _config(self) -> str: statsd_host = {repr(STATSD_HOST)} """ ) - framework_environments = self._container.get_plan().to_dict()['services'][self._workload_config.framework]['environment'] - tracing_uri = None + framework_environments = self._container.get_plan().to_dict()["services"][ + self._workload_config.framework + ]["environment"] + tracing_endpoint = None tracing_service_name = None - if framework_environments.get('OTEL_EXPORTER_OTLP_ENDPOINT', None): - tracing_endpoint = framework_environments['OTEL_EXPORTER_OTLP_ENDPOINT'] - tracing_service_name = framework_environments['OTEL_SERVICE_NAME'] + if framework_environments.get("OTEL_EXPORTER_OTLP_ENDPOINT", None): + tracing_endpoint = framework_environments["OTEL_EXPORTER_OTLP_ENDPOINT"] + tracing_service_name = framework_environments["OTEL_SERVICE_NAME"] config += textwrap.dedent( f"""\ From 8187b231fd34c4da031064a11aa3609355795ec5 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 8 Jan 2025 14:34:13 +0300 Subject: [PATCH 16/70] chore(tracing): Update examples. 
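For reference, with an endpoint of http://tempo-host:4318 and a service name of flask-k8s-app (both illustrative), the post_fork block appended to the generated Gunicorn configuration by the webserver.py change in the earlier patches would render roughly as follows:

    from opentelemetry import trace
    from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
    from opentelemetry.sdk.resources import Resource
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor

    def post_fork(server, worker):
        # One tracer provider per Gunicorn worker, tagged with the worker PID.
        resource = Resource.create(
            attributes={
                "service.name": "flask-k8s-app",
                "worker": worker.pid,
            }
        )
        trace.set_tracer_provider(TracerProvider(resource=resource))
        span_processor = BatchSpanProcessor(
            OTLPSpanExporter(endpoint="http://tempo-host:4318/v1/traces")
        )
        trace.get_tracer_provider().add_span_processor(span_processor)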
--- app.py | 16 + examples/django/charm/charmcraft.yaml | 4 + .../tempo_coordinator_k8s/v0/tracing.py | 1000 +++++++++++++++++ .../django_app/django_app/__init__.py | 2 + .../django_app/django_app/asgi.py | 19 + .../django_app/django_app/settings.py | 130 +++ .../django_app/django_app/urls.py | 33 + .../django_app/django_app/wsgi.py | 19 + .../django_tracing_app/django_app/manage.py | 26 + .../django_tracing_app/django_app/migrate.sh | 5 + .../django_app/testing/__init__.py | 2 + .../django_app/testing/admin.py | 6 + .../django_app/testing/apps.py | 9 + .../django_app/testing/migrations/__init__.py | 2 + .../django_app/testing/models.py | 6 + .../django_app/testing/tests.py | 6 + .../django_app/testing/views.py | 49 + .../django_tracing_app/requirements.txt | 9 + .../django/django_tracing_app/rockcraft.yaml | 14 + .../tempo_coordinator_k8s/v0/tracing.py | 1000 +++++++++++++++++ .../fastapi/{ => fastapi_app}/alembic.ini | 0 .../fastapi/{ => fastapi_app}/alembic/env.py | 0 .../eca6177bd16a_initial_migration.py | 0 examples/fastapi/{ => fastapi_app}/app.py | 8 +- examples/fastapi/{ => fastapi_app}/migrate.sh | 0 examples/fastapi/fastapi_app/requirements.txt | 4 + .../fastapi/{ => fastapi_app}/rockcraft.yaml | 0 .../fastapi/fastapi_tracing_app/alembic.ini | 5 + .../fastapi_tracing_app/alembic/env.py | 24 + .../eca6177bd16a_initial_migration.py | 33 + examples/fastapi/fastapi_tracing_app/app.py | 58 + .../fastapi/fastapi_tracing_app/migrate.sh | 5 + .../fastapi_tracing_app/requirements.txt | 11 + .../fastapi_tracing_app/rockcraft.yaml | 25 + examples/fastapi/requirements.txt | 7 - examples/flask/test_rock/app.py | 36 - examples/flask/test_tracing_rock/app.py | 395 +++++++ .../flask/test_tracing_rock/requirements.txt | 18 + .../flask/test_tracing_rock/rockcraft.yaml | 27 + examples/go/charm/charmcraft.yaml | 4 + .../tempo_coordinator_k8s/v0/tracing.py | 1000 +++++++++++++++++ examples/go/{ => go_app}/go.mod | 0 examples/go/{ => go_app}/go.sum | 0 .../{ => go_app}/internal/service/service.go | 0 examples/go/{ => go_app}/main.go | 0 examples/go/{ => go_app}/migrate.sh | 0 examples/go/{ => go_app}/rockcraft.yaml | 0 examples/go/go_tracing_app/go.mod | 46 + examples/go/go_tracing_app/go.sum | 96 ++ .../internal/service/service.go | 61 + examples/go/go_tracing_app/main.go | 181 +++ examples/go/go_tracing_app/migrate.sh | 5 + examples/go/go_tracing_app/rockcraft.yaml | 60 + src/paas_charm/_gunicorn/webserver.py | 23 +- src/paas_charm/charm.py | 12 +- 55 files changed, 4427 insertions(+), 74 deletions(-) create mode 100644 app.py create mode 100644 examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py create mode 100644 examples/django/django_tracing_app/django_app/django_app/__init__.py create mode 100644 examples/django/django_tracing_app/django_app/django_app/asgi.py create mode 100644 examples/django/django_tracing_app/django_app/django_app/settings.py create mode 100644 examples/django/django_tracing_app/django_app/django_app/urls.py create mode 100644 examples/django/django_tracing_app/django_app/django_app/wsgi.py create mode 100755 examples/django/django_tracing_app/django_app/manage.py create mode 100644 examples/django/django_tracing_app/django_app/migrate.sh create mode 100644 examples/django/django_tracing_app/django_app/testing/__init__.py create mode 100644 examples/django/django_tracing_app/django_app/testing/admin.py create mode 100644 examples/django/django_tracing_app/django_app/testing/apps.py create mode 100644 
examples/django/django_tracing_app/django_app/testing/migrations/__init__.py create mode 100644 examples/django/django_tracing_app/django_app/testing/models.py create mode 100644 examples/django/django_tracing_app/django_app/testing/tests.py create mode 100644 examples/django/django_tracing_app/django_app/testing/views.py create mode 100644 examples/django/django_tracing_app/requirements.txt create mode 100644 examples/django/django_tracing_app/rockcraft.yaml create mode 100644 examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py rename examples/fastapi/{ => fastapi_app}/alembic.ini (100%) rename examples/fastapi/{ => fastapi_app}/alembic/env.py (100%) rename examples/fastapi/{ => fastapi_app}/alembic/versions/eca6177bd16a_initial_migration.py (100%) rename examples/fastapi/{ => fastapi_app}/app.py (80%) rename examples/fastapi/{ => fastapi_app}/migrate.sh (100%) create mode 100644 examples/fastapi/fastapi_app/requirements.txt rename examples/fastapi/{ => fastapi_app}/rockcraft.yaml (100%) create mode 100644 examples/fastapi/fastapi_tracing_app/alembic.ini create mode 100644 examples/fastapi/fastapi_tracing_app/alembic/env.py create mode 100644 examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py create mode 100644 examples/fastapi/fastapi_tracing_app/app.py create mode 100644 examples/fastapi/fastapi_tracing_app/migrate.sh create mode 100644 examples/fastapi/fastapi_tracing_app/requirements.txt create mode 100644 examples/fastapi/fastapi_tracing_app/rockcraft.yaml delete mode 100644 examples/fastapi/requirements.txt create mode 100644 examples/flask/test_tracing_rock/app.py create mode 100644 examples/flask/test_tracing_rock/requirements.txt create mode 100644 examples/flask/test_tracing_rock/rockcraft.yaml create mode 100644 examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py rename examples/go/{ => go_app}/go.mod (100%) rename examples/go/{ => go_app}/go.sum (100%) rename examples/go/{ => go_app}/internal/service/service.go (100%) rename examples/go/{ => go_app}/main.go (100%) rename examples/go/{ => go_app}/migrate.sh (100%) rename examples/go/{ => go_app}/rockcraft.yaml (100%) create mode 100644 examples/go/go_tracing_app/go.mod create mode 100644 examples/go/go_tracing_app/go.sum create mode 100644 examples/go/go_tracing_app/internal/service/service.go create mode 100644 examples/go/go_tracing_app/main.go create mode 100755 examples/go/go_tracing_app/migrate.sh create mode 100644 examples/go/go_tracing_app/rockcraft.yaml diff --git a/app.py b/app.py new file mode 100644 index 0000000..c969e86 --- /dev/null +++ b/app.py @@ -0,0 +1,16 @@ +from flask import Flask +import time + +app = Flask(__name__) + + +@app.route("/") +def index(): + return "Hello, world!" + +@app.route("/io") +def io_bound_task(): + start_time = time.time() + time.sleep(2) + duration = time.time() - start_time + return f"I/O task completed in {round(duration, 2)} seconds" diff --git a/examples/django/charm/charmcraft.yaml b/examples/django/charm/charmcraft.yaml index 4be002f..b6884c3 100644 --- a/examples/django/charm/charmcraft.yaml +++ b/examples/django/charm/charmcraft.yaml @@ -93,6 +93,10 @@ requires: interface: postgresql_client optional: False limit: 1 + tracing: + interface: tracing + optional: True + limit: 1 resources: django-app-image: description: django application image. 
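With the tracing endpoint declared above, the collector address reaches the workload through the standard
OpenTelemetry environment variables used elsewhere in this series (OTEL_EXPORTER_OTLP_ENDPOINT,
OTEL_SERVICE_NAME). A minimal sketch of how the Django example workload could inspect them (illustrative
values; not part of the patched files):

    import os

    # Expected to be present once the tracing relation is established.
    otlp_endpoint = os.environ.get("OTEL_EXPORTER_OTLP_ENDPOINT")  # e.g. "http://tempo:4318"
    service_name = os.environ.get("OTEL_SERVICE_NAME")             # e.g. "django-k8s"

    if otlp_endpoint:
        # The OpenTelemetry SDK and OTLP exporters honour these variables
        # automatically, so no explicit exporter configuration is needed here.
        pass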
diff --git a/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py new file mode 100644 index 0000000..1dd78b5 --- /dev/null +++ b/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -0,0 +1,1000 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +"""## Overview. + +This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a +tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm +may maintain the same interface and be backward compatible with all currently integrated charms. + +## Requirer Library Usage + +Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` +object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` +object only requires instantiating it, typically in the constructor of your charm. The +`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint + is exposed by the Tempo charm, and a list of protocols it intends to send traces with. + This relation must use the `tracing` interface. + The `TracingEndpointRequirer` object may be instantiated as follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointRequirer(self, + protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] + ) + # ... + +Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the +parent charm. + +Alternatively to providing the list of requested protocols at init time, the charm can do it at +any point in time by calling the +`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. +Using this method also allows you to use per-relation protocols. + +Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling +`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: +- `otlp_grpc` +- `otlp_http` +- `zipkin` +- `tempo` + +If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, +the library will raise an error. + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + +## Provider Library Usage + +The `TracingEndpointProvider` object may be used by charms to manage relations with their +trace sources. For this purposes a Tempo-like charm needs to do two things + +1. Instantiate the `TracingEndpointProvider` object by providing it a +reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm +uses to interact with its trace sources. This relation must conform to the `tracing` interface +and it is strongly recommended that this relation be named `tracing` which is its +default value. + +For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as +follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointProvider(self) + # ... 
+ + + +""" # noqa: W505 +import enum +import json +import logging +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import pydantic +from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole +from ops.framework import EventSource, Object +from ops.model import ModelError, Relation +from pydantic import BaseModel, Field + +# The unique Charmhub library identifier, never change it +LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 4 + +PYDEPS = ["pydantic"] + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "tracing" +RELATION_INTERFACE_NAME = "tracing" + +# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 +ReceiverProtocol = Literal[ + "zipkin", + "otlp_grpc", + "otlp_http", + "jaeger_grpc", + "jaeger_thrift_http", +] + +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + +BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} + + +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { + "zipkin": TransportProtocolType.http, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, + "jaeger_thrift_http": TransportProtocolType.http, + "jaeger_grpc": TransportProtocolType.grpc, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. +""" + + +class TracingError(Exception): + """Base class for custom errors raised by this library.""" + + +class NotReadyError(TracingError): + """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" + + +class ProtocolNotRequestedError(TracingError): + """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" + + +class DataValidationError(TracingError): + """Raised when data validation fails on IPU relation data.""" + + +class AmbiguousRelationUsageError(TracingError): + """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" + + +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class DatabagModel(BaseModel): # type: ignore + """Base databag model.""" + + class Config: + """Pydantic config.""" + + # ignore any extra fields in the databag + extra = "ignore" + """Ignore any extra fields in the databag.""" + allow_population_by_field_name = True + """Allow instantiating this class by field name (instead of forcing alias).""" + + _NEST_UNDER = None + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + if cls._NEST_UNDER: + return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {f.alias for f in cls.__fields__.values()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. 
{databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.parse_raw(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + + if self._NEST_UNDER: + databag[self._NEST_UNDER] = self.json(by_alias=True) + return databag + + dct = self.dict() + for key, field in self.__fields__.items(): # type: ignore + value = dct[key] + databag[field.alias or key] = json.dumps(value) + + return databag + +else: + from pydantic import ConfigDict + + class DatabagModel(BaseModel): + """Base databag model.""" + + model_config = ConfigDict( + # ignore any extra fields in the databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, # type: ignore + ) + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) # type: ignore + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.__fields__.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( # type: ignore + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump() # type: ignore + for key, field in self.model_fields.items(): # type: ignore + value = dct[key] + if value == field.default: + continue + databag[field.alias or key] = json.dumps(value) + + return databag + + +# todo use models from charm-relation-interfaces +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" + + class Config: + """Pydantic config.""" + + use_enum_values = True + """Allow serializing enum values.""" + + name: str = Field( + ..., + description="Receiver protocol name. 
What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( # type: ignore + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + + +class Receiver(BaseModel): + """Specification of an active receiver.""" + + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. + If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" + + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) + + +class TracingRequirerAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing requirer.""" + + receivers: List[ReceiverProtocol] + """Requested receivers.""" + + +class _AutoSnapshotEvent(RelationEvent): + __args__: Tuple[str, ...] 
= () + __optional_kwargs__: Dict[str, Any] = {} + + @classmethod + def __attrs__(cls): + return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) + + def __init__(self, handle, relation, *args, **kwargs): + super().__init__(handle, relation) + + if not len(self.__args__) == len(args): + raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) + + for attr, obj in zip(self.__args__, args): + setattr(self, attr, obj) + for attr, default in self.__optional_kwargs__.items(): + obj = kwargs.get(attr, default) + setattr(self, attr, obj) + + def snapshot(self) -> dict: + dct = super().snapshot() + for attr in self.__attrs__(): + obj = getattr(self, attr) + try: + dct[attr] = obj + except ValueError as e: + raise ValueError( + "cannot automagically serialize {}: " + "override this method and do it " + "manually.".format(obj) + ) from e + + return dct + + def restore(self, snapshot: dict) -> None: + super().restore(snapshot) + for attr, obj in snapshot.items(): + setattr(self, attr, obj) + + +class RelationNotFoundError(Exception): + """Raised if no relation with the given name is found.""" + + def __init__(self, relation_name: str): + self.relation_name = relation_name + self.message = "No relation named '{}' found".format(relation_name) + super().__init__(self.message) + + +class RelationInterfaceMismatchError(Exception): + """Raised if the relation with the given name has an unexpected interface.""" + + def __init__( + self, + relation_name: str, + expected_relation_interface: str, + actual_relation_interface: str, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_interface + self.actual_relation_interface = actual_relation_interface + self.message = ( + "The '{}' relation has '{}' as interface rather than the expected '{}'".format( + relation_name, actual_relation_interface, expected_relation_interface + ) + ) + + super().__init__(self.message) + + +class RelationRoleMismatchError(Exception): + """Raised if the relation with the given name has a different role than expected.""" + + def __init__( + self, + relation_name: str, + expected_relation_role: RelationRole, + actual_relation_role: RelationRole, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_role + self.actual_relation_role = actual_relation_role + self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( + relation_name, repr(actual_relation_role), repr(expected_relation_role) + ) + + super().__init__(self.message) + + +def _validate_relation_by_interface_and_direction( + charm: CharmBase, + relation_name: str, + expected_relation_interface: str, + expected_relation_role: RelationRole, +): + """Validate a relation. + + Verifies that the `relation_name` provided: (1) exists in metadata.yaml, + (2) declares as interface the interface name passed as `relation_interface` + and (3) has the right "direction", i.e., it is a relation that `charm` + provides or requires. + + Args: + charm: a `CharmBase` object to scan for the matching relation. + relation_name: the name of the relation to be verified. + expected_relation_interface: the interface name to be matched by the + relation named `relation_name`. + expected_relation_role: whether the `relation_name` must be either + provided or required by `charm`. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. 
+ RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the same relation interface + as specified via the `expected_relation_interface` argument. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the same role as specified + via the `expected_relation_role` argument. + """ + if relation_name not in charm.meta.relations: + raise RelationNotFoundError(relation_name) + + relation = charm.meta.relations[relation_name] + + # fixme: why do we need to cast here? + actual_relation_interface = cast(str, relation.interface_name) + + if actual_relation_interface != expected_relation_interface: + raise RelationInterfaceMismatchError( + relation_name, expected_relation_interface, actual_relation_interface + ) + + if expected_relation_role is RelationRole.provides: + if relation_name not in charm.meta.provides: + raise RelationRoleMismatchError( + relation_name, RelationRole.provides, RelationRole.requires + ) + elif expected_relation_role is RelationRole.requires: + if relation_name not in charm.meta.requires: + raise RelationRoleMismatchError( + relation_name, RelationRole.requires, RelationRole.provides + ) + else: + raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) + + +class RequestEvent(RelationEvent): + """Event emitted when a remote requests a tracing endpoint.""" + + @property + def requested_receivers(self) -> List[ReceiverProtocol]: + """List of receiver protocols that have been requested.""" + relation = self.relation + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + return TracingRequirerAppData.load(relation.data[app]).receivers + + +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + +class TracingEndpointProviderEvents(CharmEvents): + """TracingEndpointProvider events.""" + + request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) + + +class TracingEndpointProvider(Object): + """Class representing a trace receiver service.""" + + on = TracingEndpointProviderEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + external_url: Optional[str] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ): + """Initialize. + + Args: + charm: a `CharmBase` instance that manages this instance of the Tempo service. + external_url: external address of the node hosting the tempo server, + if an ingress is present. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.requires` + role. 
+ """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides + ) + + super().__init__(charm, relation_name + "tracing-provider") + self._charm = charm + self._external_url = external_url + self._relation_name = relation_name + self.framework.observe( + self._charm.on[relation_name].relation_joined, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_created, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_changed, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) + + def _on_relation_event(self, e: RelationEvent): + """Handle relation created/joined/changed events.""" + if self.is_requirer_ready(e.relation): + self.on.request.emit(e.relation) + + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" + try: + self._get_requested_protocols(relation) + except NotReadyError: + return False + return True + + @staticmethod + def _get_requested_protocols(relation: Relation): + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + try: + databag = TracingRequirerAppData.load(relation.data[app]) + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"relation {relation} is not ready to talk tracing") + raise NotReadyError() + return databag.receivers + + def requested_protocols(self): + """All receiver protocols that have been requested by our related apps.""" + requested_protocols = set() + for relation in self.relations: + try: + protocols = self._get_requested_protocols(relation) + except NotReadyError: + continue + requested_protocols.update(protocols) + return requested_protocols + + @property + def relations(self) -> List[Relation]: + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] + + def publish_receivers(self, receivers: Sequence[RawReceiver]): + """Let all requirers know that these receivers are active and listening.""" + if not self._charm.unit.is_leader(): + raise RuntimeError("only leader can do this") + + for relation in self.relations: + try: + TracingProviderAppData( + receivers=[ + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers + ], + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to update_relation_data." + f"The relation must be gone." 
+ ) + continue + raise + + +class EndpointRemovedEvent(RelationBrokenEvent): + """Event representing a change in one of the receiver endpoints.""" + + +class EndpointChangedEvent(_AutoSnapshotEvent): + """Event representing a change in one of the receiver endpoints.""" + + __args__ = ("_receivers",) + + if TYPE_CHECKING: + _receivers = [] # type: List[dict] + + @property + def receivers(self) -> List[Receiver]: + """Cast receivers back from dict.""" + return [Receiver(**i) for i in self._receivers] + + +class TracingEndpointRequirerEvents(CharmEvents): + """TracingEndpointRequirer events.""" + + endpoint_changed = EventSource(EndpointChangedEvent) + endpoint_removed = EventSource(EndpointRemovedEvent) + + +class TracingEndpointRequirer(Object): + """A tracing endpoint for Tempo.""" + + on = TracingEndpointRequirerEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + protocols: Optional[List[ReceiverProtocol]] = None, + ): + """Construct a tracing requirer for a Tempo charm. + + If your application supports pushing traces to a distributed tracing backend, the + `TracingEndpointRequirer` object enables your charm to easily access endpoint information + exchanged over a `tracing` relation interface. + + Args: + charm: a `CharmBase` object that manages this + `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating + class. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". It is strongly + advised not to change the default, so that people deploying your charm will have a + consistent experience with all other charms that provide tracing endpoints. + protocols: optional list of protocols that the charm intends to send traces with. + The provider will enable receivers for these and only these protocols, + so be sure to enable all protocols the charm or its workload are going to need. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.provides` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires + ) + + super().__init__(charm, relation_name) + + self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[self._relation_name] + self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) + self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) + + if protocols: + self.request_protocols(protocols) + + def request_protocols( + self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None + ): + """Publish the list of protocols which the provider should activate.""" + # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? + relations = [relation] if relation else self.relations + + if not protocols: + # empty sequence + raise ValueError( + "You need to pass a nonempty sequence of protocols to `request_protocols`." 
+ ) + + try: + if self._charm.unit.is_leader(): + for relation in relations: + TracingRequirerAppData( + receivers=list(protocols), + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to request_protocols." + f"The relation must be gone." + ) + return + raise + + @property + def relations(self) -> List[Relation]: + """The tracing relations associated with this endpoint.""" + return self._charm.model.relations[self._relation_name] + + @property + def _relation(self) -> Optional[Relation]: + """If this wraps a single endpoint, the relation bound to it, if any.""" + if not self._is_single_endpoint: + objname = type(self).__name__ + raise AmbiguousRelationUsageError( + f"This {objname} wraps a {self._relation_name} endpoint that has " + "limit != 1. We can't determine what relation, of the possibly many, you are " + f"talking about. Please pass a relation instance while calling {objname}, " + "or set limit=1 in the charm metadata." + ) + relations = self.relations + return relations[0] if relations else None + + def is_ready(self, relation: Optional[Relation] = None): + """Is this endpoint ready?""" + relation = relation or self._relation + if not relation: + logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") + return False + if relation.data is None: + logger.error(f"relation data is None for {relation}") + return False + if not relation.app: + logger.error(f"{relation} event received but there is no relation.app") + return False + try: + databag = dict(relation.data[relation.app]) + TracingProviderAppData.load(databag) + + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"failed validating relation data for {relation}") + return False + return True + + def _on_tracing_relation_changed(self, event): + """Notify the providers that there is new endpoint information available.""" + relation = event.relation + if not self.is_ready(relation): + self.on.endpoint_removed.emit(relation) # type: ignore + return + + data = TracingProviderAppData.load(relation.data[relation.app]) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore + + def _on_tracing_relation_broken(self, event: RelationBrokenEvent): + """Notify the providers that the endpoint is broken.""" + relation = event.relation + self.on.endpoint_removed.emit(relation) # type: ignore + + def get_all_endpoints( + self, relation: Optional[Relation] = None + ) -> Optional[TracingProviderAppData]: + """Unmarshalled relation data.""" + relation = relation or self._relation + if not self.is_ready(relation): + return + return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore + + def _get_endpoint( + self, relation: Optional[Relation], protocol: ReceiverProtocol + ) -> Optional[str]: + app_data = self.get_all_endpoints(relation) + if not app_data: + return None + receivers: List[Receiver] = list( + filter(lambda i: i.protocol.name == protocol, app_data.receivers) + ) + if not receivers: + # it can happen if the charm requests tracing protocols, but the relay (such as grafana-agent) isn't yet + # connected to the tracing backend. 
In this case, it's not an error the charm author can do anything about + logger.warning(f"no receiver found with protocol={protocol!r}.") + return + if len(receivers) > 1: + # if we have more than 1 receiver that matches, it shouldn't matter which receiver we'll be using. + logger.warning( + f"too many receivers with protocol={protocol!r}; using first one. Found: {receivers}" + ) + + receiver = receivers[0] + return receiver.url + + def get_endpoint( + self, protocol: ReceiverProtocol, relation: Optional[Relation] = None + ) -> Optional[str]: + """Receiver endpoint for the given protocol. + + It could happen that this function gets called before the provider publishes the endpoints. + In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to + restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. + + Raises: + ProtocolNotRequestedError: + If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. + """ + endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) + if not endpoint: + requested_protocols = set() + relations = [relation] if relation else self.relations + for relation in relations: + try: + databag = TracingRequirerAppData.load(relation.data[self._charm.app]) + except DataValidationError: + continue + + requested_protocols.update(databag.receivers) + + if protocol not in requested_protocols: + raise ProtocolNotRequestedError(protocol, relation) + + return None + return endpoint + + +def charm_tracing_config( + endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] +) -> Tuple[Optional[str], Optional[str]]: + """Return the charm_tracing config you likely want. + + If no endpoint is provided: + disable charm tracing. + If https endpoint is provided but cert_path is not found on disk: + disable charm tracing. + If https endpoint is provided and cert_path is None: + ERROR + Else: + proceed with charm tracing (with or without tls, as appropriate) + + Usage: + If you are using charm_tracing >= v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self.my_endpoint, self.cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + + If you are using charm_tracing < v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self._my_endpoint, self._cert_path = charm_tracing_config( + ... 
self.tracing, self._cert_path) + >>> @property + >>> def my_endpoint(self): + >>> return self._my_endpoint + >>> @property + >>> def cert_path(self): + >>> return self._cert_path + + """ + if not endpoint_requirer.is_ready(): + return None, None + + endpoint = endpoint_requirer.get_endpoint("otlp_http") + if not endpoint: + return None, None + + is_https = endpoint.startswith("https://") + + if is_https: + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " + ) + return None, None + return endpoint, str(cert_path) + else: + return endpoint, None diff --git a/examples/django/django_tracing_app/django_app/django_app/__init__.py b/examples/django/django_tracing_app/django_app/django_app/__init__.py new file mode 100644 index 0000000..e3979c0 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/django_app/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_app/django_app/asgi.py b/examples/django/django_tracing_app/django_app/django_app/asgi.py new file mode 100644 index 0000000..1616128 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/django_app/asgi.py @@ -0,0 +1,19 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +""" +ASGI config for django_app project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/5.0/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings") + +application = get_asgi_application() diff --git a/examples/django/django_tracing_app/django_app/django_app/settings.py b/examples/django/django_tracing_app/django_app/django_app/settings.py new file mode 100644 index 0000000..22e0102 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/django_app/settings.py @@ -0,0 +1,130 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +""" +Django settings for django_app project. + +Generated by 'django-admin startproject' using Django 5.0.2. + +For more information on this file, see +https://docs.djangoproject.com/en/5.0/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/5.0/ref/settings/ +""" + +import json +import os +import urllib.parse +from pathlib import Path + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/5.0/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "secret") + +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = os.environ.get("DJANGO_DEBUG", "true") == "true" + +ALLOWED_HOSTS = json.loads(os.environ.get("DJANGO_ALLOWED_HOSTS", '["*"]')) + + +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", +] + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] + +ROOT_URLCONF = "django_app.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "django_app.wsgi.application" + + +# Database +# https://docs.djangoproject.com/en/5.0/ref/settings/#databases +DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("POSTGRESQL_DB_NAME"), + "USER": os.environ.get("POSTGRESQL_DB_USERNAME"), + "PASSWORD": os.environ.get("POSTGRESQL_DB_PASSWORD"), + "HOST": os.environ.get("POSTGRESQL_DB_HOSTNAME"), + "PORT": os.environ.get("POSTGRESQL_DB_PORT", "5432"), + } +} + + +# Password validation +# https://docs.djangoproject.com/en/5.0/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/5.0/topics/i18n/ + +LANGUAGE_CODE = "en-us" + +TIME_ZONE = "UTC" + +USE_I18N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/5.0/howto/static-files/ + +STATIC_URL = "static/" + +# Default primary key field type +# https://docs.djangoproject.com/en/5.0/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" diff --git a/examples/django/django_tracing_app/django_app/django_app/urls.py b/examples/django/django_tracing_app/django_app/django_app/urls.py new file mode 100644 index 0000000..fc8c0b0 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/django_app/urls.py @@ -0,0 +1,33 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +""" +URL configuration for django_app project. + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/5.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" + +from django.contrib import admin +from django.urls import path +from testing.views import environ, get_settings, login, my_view, sleep, user_count + +urlpatterns = [ + path("admin/", admin.site.urls), + path("settings/", get_settings, name="get_settings"), + path("len/users", user_count, name="user_count"), + path("environ", environ, name="environ"), + path("sleep", sleep, name="sleep"), + path("hello", my_view, name="my_view"), + path("login", login, name="login"), +] diff --git a/examples/django/django_tracing_app/django_app/django_app/wsgi.py b/examples/django/django_tracing_app/django_app/django_app/wsgi.py new file mode 100644 index 0000000..f422221 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/django_app/wsgi.py @@ -0,0 +1,19 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +""" +WSGI config for django_app project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/5.0/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings") + +application = get_wsgi_application() diff --git a/examples/django/django_tracing_app/django_app/manage.py b/examples/django/django_tracing_app/django_app/manage.py new file mode 100755 index 0000000..cf94f1f --- /dev/null +++ b/examples/django/django_tracing_app/django_app/manage.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/examples/django/django_tracing_app/django_app/migrate.sh b/examples/django/django_tracing_app/django_app/migrate.sh new file mode 100644 index 0000000..ce3a73c --- /dev/null +++ b/examples/django/django_tracing_app/django_app/migrate.sh @@ -0,0 +1,5 @@ +#!/usr/bin/bash +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +python3 manage.py migrate diff --git a/examples/django/django_tracing_app/django_app/testing/__init__.py b/examples/django/django_tracing_app/django_app/testing/__init__.py new file mode 100644 index 0000000..e3979c0 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_app/testing/admin.py b/examples/django/django_tracing_app/django_app/testing/admin.py new file mode 100644 index 0000000..b111777 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/admin.py @@ -0,0 +1,6 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +from django.contrib import admin + +# Register your models here. 
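For reference, the opentelemetry-instrumentation-wsgi package listed in the new example's requirements further
below can also wrap a Django WSGI callable explicitly; a minimal sketch (illustrative only, not one of the files
added by this patch):

    import os

    from django.core.wsgi import get_wsgi_application
    from opentelemetry.instrumentation.wsgi import OpenTelemetryMiddleware

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings")

    # Wrapping the WSGI callable makes every request produce a server span.
    application = OpenTelemetryMiddleware(get_wsgi_application())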
diff --git a/examples/django/django_tracing_app/django_app/testing/apps.py b/examples/django/django_tracing_app/django_app/testing/apps.py new file mode 100644 index 0000000..f435e0c --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/apps.py @@ -0,0 +1,9 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +from django.apps import AppConfig + + +class TestingConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "testing" diff --git a/examples/django/django_tracing_app/django_app/testing/migrations/__init__.py b/examples/django/django_tracing_app/django_app/testing/migrations/__init__.py new file mode 100644 index 0000000..e3979c0 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/migrations/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_app/testing/models.py b/examples/django/django_tracing_app/django_app/testing/models.py new file mode 100644 index 0000000..dde0a81 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/models.py @@ -0,0 +1,6 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +from django.db import models + +# Create your models here. diff --git a/examples/django/django_tracing_app/django_app/testing/tests.py b/examples/django/django_tracing_app/django_app/testing/tests.py new file mode 100644 index 0000000..922bda5 --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/tests.py @@ -0,0 +1,6 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +from django.test import TestCase + +# Create your tests here. diff --git a/examples/django/django_tracing_app/django_app/testing/views.py b/examples/django/django_tracing_app/django_app/testing/views.py new file mode 100644 index 0000000..991b30c --- /dev/null +++ b/examples/django/django_tracing_app/django_app/testing/views.py @@ -0,0 +1,49 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +import os +import time + +from django.conf import settings +from django.contrib.auth import authenticate +from django.contrib.auth.models import User +from django.http import HttpResponse, JsonResponse +from opentelemetry import trace + +tracer = trace.get_tracer(__name__) + + +def environ(request): + return JsonResponse(dict(os.environ)) + + +def user_count(request): + return JsonResponse(User.objects.count(), safe=False) + + +def get_settings(request, name): + if hasattr(settings, name): + return JsonResponse(getattr(settings, name), safe=False) + else: + return JsonResponse({"error": f"settings {name!r} not found"}, status=404) + + +def my_view(request): + # Create a custom span + with tracer.start_as_current_span("custom-span"): + print("Hello, World!!!") + return HttpResponse("Hello, World!") + + +def sleep(request): + duration = request.GET.get("duration") + time.sleep(int(duration)) + return HttpResponse() + + +def login(request): + user = authenticate(username=request.GET.get("username"), password=request.GET.get("password")) + if user is not None: + return HttpResponse(status=200) + else: + return HttpResponse(status=403) diff --git a/examples/django/django_tracing_app/requirements.txt b/examples/django/django_tracing_app/requirements.txt new file mode 100644 index 0000000..f026b65 --- /dev/null +++ b/examples/django/django_tracing_app/requirements.txt @@ -0,0 +1,9 @@ +Django +tzdata +psycopg2-binary +opentelemetry-api +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http +opentelemetry-instrumentation +opentelemetry-instrumentation-wsgi +opentelemetry-sdk diff --git a/examples/django/django_tracing_app/rockcraft.yaml b/examples/django/django_tracing_app/rockcraft.yaml new file mode 100644 index 0000000..7834eac --- /dev/null +++ b/examples/django/django_tracing_app/rockcraft.yaml @@ -0,0 +1,14 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +name: django-tracing-app +summary: Example Django application image. +description: Example Django application image. +version: "0.1" +base: ubuntu@22.04 +license: Apache-2.0 +platforms: + amd64: + +extensions: + - django-framework diff --git a/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py new file mode 100644 index 0000000..1dd78b5 --- /dev/null +++ b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -0,0 +1,1000 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +"""## Overview. + +This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a +tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm +may maintain the same interface and be backward compatible with all currently integrated charms. + +## Requirer Library Usage + +Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` +object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` +object only requires instantiating it, typically in the constructor of your charm. The +`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint + is exposed by the Tempo charm, and a list of protocols it intends to send traces with. + This relation must use the `tracing` interface. 
+ The `TracingEndpointRequirer` object may be instantiated as follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointRequirer(self, + protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] + ) + # ... + +Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the +parent charm. + +Alternatively to providing the list of requested protocols at init time, the charm can do it at +any point in time by calling the +`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. +Using this method also allows you to use per-relation protocols. + +Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling +`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: +- `otlp_grpc` +- `otlp_http` +- `zipkin` +- `tempo` + +If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, +the library will raise an error. + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + +## Provider Library Usage + +The `TracingEndpointProvider` object may be used by charms to manage relations with their +trace sources. For this purposes a Tempo-like charm needs to do two things + +1. Instantiate the `TracingEndpointProvider` object by providing it a +reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm +uses to interact with its trace sources. This relation must conform to the `tracing` interface +and it is strongly recommended that this relation be named `tracing` which is its +default value. + +For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as +follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointProvider(self) + # ... + + + +""" # noqa: W505 +import enum +import json +import logging +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import pydantic +from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole +from ops.framework import EventSource, Object +from ops.model import ModelError, Relation +from pydantic import BaseModel, Field + +# The unique Charmhub library identifier, never change it +LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 4 + +PYDEPS = ["pydantic"] + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "tracing" +RELATION_INTERFACE_NAME = "tracing" + +# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 +ReceiverProtocol = Literal[ + "zipkin", + "otlp_grpc", + "otlp_http", + "jaeger_grpc", + "jaeger_thrift_http", +] + +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. 
A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + +BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} + + +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { + "zipkin": TransportProtocolType.http, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, + "jaeger_thrift_http": TransportProtocolType.http, + "jaeger_grpc": TransportProtocolType.grpc, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. +""" + + +class TracingError(Exception): + """Base class for custom errors raised by this library.""" + + +class NotReadyError(TracingError): + """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" + + +class ProtocolNotRequestedError(TracingError): + """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" + + +class DataValidationError(TracingError): + """Raised when data validation fails on IPU relation data.""" + + +class AmbiguousRelationUsageError(TracingError): + """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" + + +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class DatabagModel(BaseModel): # type: ignore + """Base databag model.""" + + class Config: + """Pydantic config.""" + + # ignore any extra fields in the databag + extra = "ignore" + """Ignore any extra fields in the databag.""" + allow_population_by_field_name = True + """Allow instantiating this class by field name (instead of forcing alias).""" + + _NEST_UNDER = None + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + if cls._NEST_UNDER: + return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {f.alias for f in cls.__fields__.values()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.parse_raw(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + + if self._NEST_UNDER: + databag[self._NEST_UNDER] = self.json(by_alias=True) + return databag + + dct = self.dict() + for key, field in self.__fields__.items(): # type: ignore + value = dct[key] + databag[field.alias or key] = json.dumps(value) + + return databag + +else: + from pydantic import ConfigDict + + class DatabagModel(BaseModel): + """Base databag model.""" + + model_config = ConfigDict( + # ignore any extra fields in the databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). 
+ populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, # type: ignore + ) + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) # type: ignore + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.__fields__.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( # type: ignore + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump() # type: ignore + for key, field in self.model_fields.items(): # type: ignore + value = dct[key] + if value == field.default: + continue + databag[field.alias or key] = json.dumps(value) + + return databag + + +# todo use models from charm-relation-interfaces +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" + + class Config: + """Pydantic config.""" + + use_enum_values = True + """Allow serializing enum values.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( # type: ignore + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + + +class Receiver(BaseModel): + """Specification of an active receiver.""" + + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. 
+ If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" + + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) + + +class TracingRequirerAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing requirer.""" + + receivers: List[ReceiverProtocol] + """Requested receivers.""" + + +class _AutoSnapshotEvent(RelationEvent): + __args__: Tuple[str, ...] = () + __optional_kwargs__: Dict[str, Any] = {} + + @classmethod + def __attrs__(cls): + return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) + + def __init__(self, handle, relation, *args, **kwargs): + super().__init__(handle, relation) + + if not len(self.__args__) == len(args): + raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) + + for attr, obj in zip(self.__args__, args): + setattr(self, attr, obj) + for attr, default in self.__optional_kwargs__.items(): + obj = kwargs.get(attr, default) + setattr(self, attr, obj) + + def snapshot(self) -> dict: + dct = super().snapshot() + for attr in self.__attrs__(): + obj = getattr(self, attr) + try: + dct[attr] = obj + except ValueError as e: + raise ValueError( + "cannot automagically serialize {}: " + "override this method and do it " + "manually.".format(obj) + ) from e + + return dct + + def restore(self, snapshot: dict) -> None: + super().restore(snapshot) + for attr, obj in snapshot.items(): + setattr(self, attr, obj) + + +class RelationNotFoundError(Exception): + """Raised if no relation with the given name is found.""" + + def __init__(self, relation_name: str): + self.relation_name = relation_name + self.message = "No relation named '{}' found".format(relation_name) + super().__init__(self.message) + + +class RelationInterfaceMismatchError(Exception): + """Raised if the relation with the given name has an unexpected interface.""" + + def __init__( + self, + relation_name: str, + expected_relation_interface: str, + actual_relation_interface: str, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_interface + self.actual_relation_interface = actual_relation_interface + self.message = ( + "The '{}' relation has '{}' as interface rather than the expected '{}'".format( + relation_name, actual_relation_interface, expected_relation_interface + ) + ) + + super().__init__(self.message) + + +class RelationRoleMismatchError(Exception): + """Raised if the relation with the given name has a different role than expected.""" + + def __init__( + self, + relation_name: str, + expected_relation_role: RelationRole, + actual_relation_role: RelationRole, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_role + self.actual_relation_role = actual_relation_role + self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( + relation_name, repr(actual_relation_role), repr(expected_relation_role) + ) + + super().__init__(self.message) + + +def _validate_relation_by_interface_and_direction( + charm: CharmBase, + relation_name: str, + expected_relation_interface: str, + expected_relation_role: RelationRole, +): + """Validate a relation. 
+ + Verifies that the `relation_name` provided: (1) exists in metadata.yaml, + (2) declares as interface the interface name passed as `relation_interface` + and (3) has the right "direction", i.e., it is a relation that `charm` + provides or requires. + + Args: + charm: a `CharmBase` object to scan for the matching relation. + relation_name: the name of the relation to be verified. + expected_relation_interface: the interface name to be matched by the + relation named `relation_name`. + expected_relation_role: whether the `relation_name` must be either + provided or required by `charm`. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the same relation interface + as specified via the `expected_relation_interface` argument. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the same role as specified + via the `expected_relation_role` argument. + """ + if relation_name not in charm.meta.relations: + raise RelationNotFoundError(relation_name) + + relation = charm.meta.relations[relation_name] + + # fixme: why do we need to cast here? + actual_relation_interface = cast(str, relation.interface_name) + + if actual_relation_interface != expected_relation_interface: + raise RelationInterfaceMismatchError( + relation_name, expected_relation_interface, actual_relation_interface + ) + + if expected_relation_role is RelationRole.provides: + if relation_name not in charm.meta.provides: + raise RelationRoleMismatchError( + relation_name, RelationRole.provides, RelationRole.requires + ) + elif expected_relation_role is RelationRole.requires: + if relation_name not in charm.meta.requires: + raise RelationRoleMismatchError( + relation_name, RelationRole.requires, RelationRole.provides + ) + else: + raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) + + +class RequestEvent(RelationEvent): + """Event emitted when a remote requests a tracing endpoint.""" + + @property + def requested_receivers(self) -> List[ReceiverProtocol]: + """List of receiver protocols that have been requested.""" + relation = self.relation + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + return TracingRequirerAppData.load(relation.data[app]).receivers + + +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + +class TracingEndpointProviderEvents(CharmEvents): + """TracingEndpointProvider events.""" + + request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) + + +class TracingEndpointProvider(Object): + """Class representing a trace receiver service.""" + + on = TracingEndpointProviderEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + external_url: Optional[str] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ): + """Initialize. + + Args: + charm: a `CharmBase` instance that manages this instance of the Tempo service. + external_url: external address of the node hosting the tempo server, + if an ingress is present. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". 
+ + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.requires` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides + ) + + super().__init__(charm, relation_name + "tracing-provider") + self._charm = charm + self._external_url = external_url + self._relation_name = relation_name + self.framework.observe( + self._charm.on[relation_name].relation_joined, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_created, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_changed, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) + + def _on_relation_event(self, e: RelationEvent): + """Handle relation created/joined/changed events.""" + if self.is_requirer_ready(e.relation): + self.on.request.emit(e.relation) + + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" + try: + self._get_requested_protocols(relation) + except NotReadyError: + return False + return True + + @staticmethod + def _get_requested_protocols(relation: Relation): + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + try: + databag = TracingRequirerAppData.load(relation.data[app]) + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"relation {relation} is not ready to talk tracing") + raise NotReadyError() + return databag.receivers + + def requested_protocols(self): + """All receiver protocols that have been requested by our related apps.""" + requested_protocols = set() + for relation in self.relations: + try: + protocols = self._get_requested_protocols(relation) + except NotReadyError: + continue + requested_protocols.update(protocols) + return requested_protocols + + @property + def relations(self) -> List[Relation]: + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] + + def publish_receivers(self, receivers: Sequence[RawReceiver]): + """Let all requirers know that these receivers are active and listening.""" + if not self._charm.unit.is_leader(): + raise RuntimeError("only leader can do this") + + for relation in self.relations: + try: + TracingProviderAppData( + receivers=[ + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers + ], + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to update_relation_data." + f"The relation must be gone." 
+ ) + continue + raise + + +class EndpointRemovedEvent(RelationBrokenEvent): + """Event representing a change in one of the receiver endpoints.""" + + +class EndpointChangedEvent(_AutoSnapshotEvent): + """Event representing a change in one of the receiver endpoints.""" + + __args__ = ("_receivers",) + + if TYPE_CHECKING: + _receivers = [] # type: List[dict] + + @property + def receivers(self) -> List[Receiver]: + """Cast receivers back from dict.""" + return [Receiver(**i) for i in self._receivers] + + +class TracingEndpointRequirerEvents(CharmEvents): + """TracingEndpointRequirer events.""" + + endpoint_changed = EventSource(EndpointChangedEvent) + endpoint_removed = EventSource(EndpointRemovedEvent) + + +class TracingEndpointRequirer(Object): + """A tracing endpoint for Tempo.""" + + on = TracingEndpointRequirerEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + protocols: Optional[List[ReceiverProtocol]] = None, + ): + """Construct a tracing requirer for a Tempo charm. + + If your application supports pushing traces to a distributed tracing backend, the + `TracingEndpointRequirer` object enables your charm to easily access endpoint information + exchanged over a `tracing` relation interface. + + Args: + charm: a `CharmBase` object that manages this + `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating + class. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". It is strongly + advised not to change the default, so that people deploying your charm will have a + consistent experience with all other charms that provide tracing endpoints. + protocols: optional list of protocols that the charm intends to send traces with. + The provider will enable receivers for these and only these protocols, + so be sure to enable all protocols the charm or its workload are going to need. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.provides` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires + ) + + super().__init__(charm, relation_name) + + self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[self._relation_name] + self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) + self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) + + if protocols: + self.request_protocols(protocols) + + def request_protocols( + self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None + ): + """Publish the list of protocols which the provider should activate.""" + # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? + relations = [relation] if relation else self.relations + + if not protocols: + # empty sequence + raise ValueError( + "You need to pass a nonempty sequence of protocols to `request_protocols`." 
+ ) + + try: + if self._charm.unit.is_leader(): + for relation in relations: + TracingRequirerAppData( + receivers=list(protocols), + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to request_protocols." + f"The relation must be gone." + ) + return + raise + + @property + def relations(self) -> List[Relation]: + """The tracing relations associated with this endpoint.""" + return self._charm.model.relations[self._relation_name] + + @property + def _relation(self) -> Optional[Relation]: + """If this wraps a single endpoint, the relation bound to it, if any.""" + if not self._is_single_endpoint: + objname = type(self).__name__ + raise AmbiguousRelationUsageError( + f"This {objname} wraps a {self._relation_name} endpoint that has " + "limit != 1. We can't determine what relation, of the possibly many, you are " + f"talking about. Please pass a relation instance while calling {objname}, " + "or set limit=1 in the charm metadata." + ) + relations = self.relations + return relations[0] if relations else None + + def is_ready(self, relation: Optional[Relation] = None): + """Is this endpoint ready?""" + relation = relation or self._relation + if not relation: + logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") + return False + if relation.data is None: + logger.error(f"relation data is None for {relation}") + return False + if not relation.app: + logger.error(f"{relation} event received but there is no relation.app") + return False + try: + databag = dict(relation.data[relation.app]) + TracingProviderAppData.load(databag) + + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"failed validating relation data for {relation}") + return False + return True + + def _on_tracing_relation_changed(self, event): + """Notify the providers that there is new endpoint information available.""" + relation = event.relation + if not self.is_ready(relation): + self.on.endpoint_removed.emit(relation) # type: ignore + return + + data = TracingProviderAppData.load(relation.data[relation.app]) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore + + def _on_tracing_relation_broken(self, event: RelationBrokenEvent): + """Notify the providers that the endpoint is broken.""" + relation = event.relation + self.on.endpoint_removed.emit(relation) # type: ignore + + def get_all_endpoints( + self, relation: Optional[Relation] = None + ) -> Optional[TracingProviderAppData]: + """Unmarshalled relation data.""" + relation = relation or self._relation + if not self.is_ready(relation): + return + return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore + + def _get_endpoint( + self, relation: Optional[Relation], protocol: ReceiverProtocol + ) -> Optional[str]: + app_data = self.get_all_endpoints(relation) + if not app_data: + return None + receivers: List[Receiver] = list( + filter(lambda i: i.protocol.name == protocol, app_data.receivers) + ) + if not receivers: + # it can happen if the charm requests tracing protocols, but the relay (such as grafana-agent) isn't yet + # connected to the tracing backend. 
In this case, it's not an error the charm author can do anything about + logger.warning(f"no receiver found with protocol={protocol!r}.") + return + if len(receivers) > 1: + # if we have more than 1 receiver that matches, it shouldn't matter which receiver we'll be using. + logger.warning( + f"too many receivers with protocol={protocol!r}; using first one. Found: {receivers}" + ) + + receiver = receivers[0] + return receiver.url + + def get_endpoint( + self, protocol: ReceiverProtocol, relation: Optional[Relation] = None + ) -> Optional[str]: + """Receiver endpoint for the given protocol. + + It could happen that this function gets called before the provider publishes the endpoints. + In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to + restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. + + Raises: + ProtocolNotRequestedError: + If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. + """ + endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) + if not endpoint: + requested_protocols = set() + relations = [relation] if relation else self.relations + for relation in relations: + try: + databag = TracingRequirerAppData.load(relation.data[self._charm.app]) + except DataValidationError: + continue + + requested_protocols.update(databag.receivers) + + if protocol not in requested_protocols: + raise ProtocolNotRequestedError(protocol, relation) + + return None + return endpoint + + +def charm_tracing_config( + endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] +) -> Tuple[Optional[str], Optional[str]]: + """Return the charm_tracing config you likely want. + + If no endpoint is provided: + disable charm tracing. + If https endpoint is provided but cert_path is not found on disk: + disable charm tracing. + If https endpoint is provided and cert_path is None: + ERROR + Else: + proceed with charm tracing (with or without tls, as appropriate) + + Usage: + If you are using charm_tracing >= v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self.my_endpoint, self.cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + + If you are using charm_tracing < v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self._my_endpoint, self._cert_path = charm_tracing_config( + ... 
self.tracing, self._cert_path) + >>> @property + >>> def my_endpoint(self): + >>> return self._my_endpoint + >>> @property + >>> def cert_path(self): + >>> return self._cert_path + + """ + if not endpoint_requirer.is_ready(): + return None, None + + endpoint = endpoint_requirer.get_endpoint("otlp_http") + if not endpoint: + return None, None + + is_https = endpoint.startswith("https://") + + if is_https: + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " + ) + return None, None + return endpoint, str(cert_path) + else: + return endpoint, None diff --git a/examples/fastapi/alembic.ini b/examples/fastapi/fastapi_app/alembic.ini similarity index 100% rename from examples/fastapi/alembic.ini rename to examples/fastapi/fastapi_app/alembic.ini diff --git a/examples/fastapi/alembic/env.py b/examples/fastapi/fastapi_app/alembic/env.py similarity index 100% rename from examples/fastapi/alembic/env.py rename to examples/fastapi/fastapi_app/alembic/env.py diff --git a/examples/fastapi/alembic/versions/eca6177bd16a_initial_migration.py b/examples/fastapi/fastapi_app/alembic/versions/eca6177bd16a_initial_migration.py similarity index 100% rename from examples/fastapi/alembic/versions/eca6177bd16a_initial_migration.py rename to examples/fastapi/fastapi_app/alembic/versions/eca6177bd16a_initial_migration.py diff --git a/examples/fastapi/app.py b/examples/fastapi/fastapi_app/app.py similarity index 80% rename from examples/fastapi/app.py rename to examples/fastapi/fastapi_app/app.py index d27d337..5f46f85 100644 --- a/examples/fastapi/app.py +++ b/examples/fastapi/fastapi_app/app.py @@ -3,17 +3,12 @@ import os from fastapi import FastAPI, HTTPException -from opentelemetry import trace -from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor from sqlalchemy import Column, Integer, String, create_engine, inspect from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker app = FastAPI() - -FastAPIInstrumentor.instrument_app(app) -tracer = trace.get_tracer(__name__) engine = create_engine(os.environ["POSTGRESQL_DB_CONNECT_STRING"], echo=True) Session = scoped_session(sessionmaker(bind=engine)) @@ -31,8 +26,7 @@ class User(Base): @app.get("/") async def root(): - with tracer.start_as_current_span("custom-span"): - return "Hello, World!" + return "Hello, World!" 
@app.get("/env/user-defined-config") diff --git a/examples/fastapi/migrate.sh b/examples/fastapi/fastapi_app/migrate.sh similarity index 100% rename from examples/fastapi/migrate.sh rename to examples/fastapi/fastapi_app/migrate.sh diff --git a/examples/fastapi/fastapi_app/requirements.txt b/examples/fastapi/fastapi_app/requirements.txt new file mode 100644 index 0000000..a697414 --- /dev/null +++ b/examples/fastapi/fastapi_app/requirements.txt @@ -0,0 +1,4 @@ +fastapi +SQLAlchemy +alembic +psycopg2-binary diff --git a/examples/fastapi/rockcraft.yaml b/examples/fastapi/fastapi_app/rockcraft.yaml similarity index 100% rename from examples/fastapi/rockcraft.yaml rename to examples/fastapi/fastapi_app/rockcraft.yaml diff --git a/examples/fastapi/fastapi_tracing_app/alembic.ini b/examples/fastapi/fastapi_tracing_app/alembic.ini new file mode 100644 index 0000000..353b047 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/alembic.ini @@ -0,0 +1,5 @@ +; Copyright 2024 Canonical Ltd. +; See LICENSE file for licensing details. + +[alembic] +script_location = alembic diff --git a/examples/fastapi/fastapi_tracing_app/alembic/env.py b/examples/fastapi/fastapi_tracing_app/alembic/env.py new file mode 100644 index 0000000..63e0974 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/alembic/env.py @@ -0,0 +1,24 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +import os +import sys + +from alembic import context + +sys.path.append(os.getcwd()) + +from app import Base, engine + +config = context.config +target_metadata = Base.metadata + + +def run_migrations(): + with engine.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + with context.begin_transaction(): + context.run_migrations() + + +run_migrations() diff --git a/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py b/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py new file mode 100644 index 0000000..18158e6 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py @@ -0,0 +1,33 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Initial migration + +Revision ID: eca6177bd16a +Revises: +Create Date: 2023-09-05 17:12:56.303534 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision: str = "eca6177bd16a" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade(): + op.create_table( + "users", + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("username", sa.String(80), unique=True, nullable=False), + sa.Column("password", sa.String(256), nullable=False), + ) + + +def downgrade(): + op.drop_table("users") diff --git a/examples/fastapi/fastapi_tracing_app/app.py b/examples/fastapi/fastapi_tracing_app/app.py new file mode 100644 index 0000000..9871c12 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/app.py @@ -0,0 +1,58 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+import os + +from fastapi import FastAPI, HTTPException +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from opentelemetry.trace import get_tracer_provider, set_tracer_provider +from sqlalchemy import Column, Integer, String, create_engine, inspect +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import scoped_session, sessionmaker + +app = FastAPI() + +set_tracer_provider(TracerProvider()) +get_tracer_provider().add_span_processor( + BatchSpanProcessor(OTLPSpanExporter()) +) + +get_tracer_provider().add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) +FastAPIInstrumentor.instrument_app(app) +tracer = trace.get_tracer(__name__) +engine = create_engine(os.environ["POSTGRESQL_DB_CONNECT_STRING"], echo=True) + +Session = scoped_session(sessionmaker(bind=engine)) + +Base = declarative_base() + + +class User(Base): + __tablename__ = "users" + + id = Column(Integer, primary_key=True) + username = Column(String(80), unique=True, nullable=False) + password = Column(String(256), nullable=False) + + +@app.get("/") +async def root(): + with tracer.start_as_current_span("custom-span"): + return "Hello, World!" + + +@app.get("/env/user-defined-config") +async def user_defined_config(): + return os.getenv("APP_USER_DEFINED_CONFIG", None) + + +@app.get("/table/{table}") +def test_table(table: str): + if inspect(engine).has_table(table): + return "SUCCESS" + else: + raise HTTPException(status_code=404, detail="Table not found") diff --git a/examples/fastapi/fastapi_tracing_app/migrate.sh b/examples/fastapi/fastapi_tracing_app/migrate.sh new file mode 100644 index 0000000..1a91ca1 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/migrate.sh @@ -0,0 +1,5 @@ +#! /usr/bin/env bash +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +alembic upgrade head diff --git a/examples/fastapi/fastapi_tracing_app/requirements.txt b/examples/fastapi/fastapi_tracing_app/requirements.txt new file mode 100644 index 0000000..c7f2b22 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/requirements.txt @@ -0,0 +1,11 @@ +fastapi +SQLAlchemy +alembic +psycopg2-binary +opentelemetry-api +opentelemetry-sdk +opentelemetry-distro +opentelemetry-instrumentation-fastapi +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http +opentelemetry-instrumentation diff --git a/examples/fastapi/fastapi_tracing_app/rockcraft.yaml b/examples/fastapi/fastapi_tracing_app/rockcraft.yaml new file mode 100644 index 0000000..088e5e1 --- /dev/null +++ b/examples/fastapi/fastapi_tracing_app/rockcraft.yaml @@ -0,0 +1,25 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +name: fastapi-tracing-app +summary: Example FastAPI application image. +description: Example FastAPI application image. +version: "0.1" +base: ubuntu@24.04 + +platforms: + amd64: + +extensions: + - fastapi-framework + +parts: + alembic: + plugin: dump + source: . 
+ organize: + alembic: app/alembic + alembic.ini: app/alembic.ini + stage: + - app/alembic + - app/alembic.ini diff --git a/examples/fastapi/requirements.txt b/examples/fastapi/requirements.txt deleted file mode 100644 index 9af200c..0000000 --- a/examples/fastapi/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -fastapi -SQLAlchemy -alembic -psycopg2-binary -opentelemetry-api -opentelemetry-sdk -opentelemetry-instrumentation-fastapi diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index 03da5ef..7d0b087 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -20,8 +20,6 @@ import redis from celery import Celery, Task from flask import Flask, g, jsonify, request -from opentelemetry import trace -from opentelemetry.instrumentation.flask import FlaskInstrumentor def hostname(): @@ -57,46 +55,12 @@ def __call__(self, *args: object, **kwargs: object) -> object: app = Flask(__name__) app.config.from_prefixed_env() -FlaskInstrumentor().instrument_app(app) - -tracer = trace.get_tracer(__name__) - broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") # Configure Celery only if Redis is configured celery_app = celery_init_app(app, broker_url) redis_client = redis.Redis.from_url(broker_url) if broker_url else None -def fib_slow(n): - if n <= 1: - return n - return fib_slow(n - 1) + fib_fast(n - 2) - - -def fib_fast(n): - nth_fib = [0] * (n + 2) - nth_fib[1] = 1 - for i in range(2, n + 1): - nth_fib[i] = nth_fib[i - 1] + nth_fib[i - 2] - return nth_fib[n] - - -@application.route("/fibonacci") -def fibonacci(): - n = int(request.args.get("n", 1)) - with tracer.start_as_current_span("root"): - with tracer.start_as_current_span("fib_slow") as slow_span: - answer = fib_slow(n) - slow_span.set_attribute("n", n) - slow_span.set_attribute("nth_fibonacci", answer) - with tracer.start_as_current_span("fib_fast") as fast_span: - answer = fib_fast(n) - fast_span.set_attribute("n", n) - fast_span.set_attribute("nth_fibonacci", answer) - - return f"F({n}) is: ({answer})" - - @celery_app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): """Set up periodic tasks in the scheduler.""" diff --git a/examples/flask/test_tracing_rock/app.py b/examples/flask/test_tracing_rock/app.py new file mode 100644 index 0000000..57bafba --- /dev/null +++ b/examples/flask/test_tracing_rock/app.py @@ -0,0 +1,395 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +import logging +import os +import socket +import time +import urllib.parse +from urllib.parse import urlparse + +import boto3 +import botocore.config +import pika +import psycopg +import pymongo +import pymongo.database +import pymongo.errors +import pymysql +import pymysql.cursors +import redis +from celery import Celery, Task +from flask import Flask, g, jsonify, request +from opentelemetry import trace +from opentelemetry.instrumentation.flask import FlaskInstrumentor + + +def hostname(): + """Get the hostname of the current machine.""" + return socket.gethostbyname(socket.gethostname()) + + +def celery_init_app(app: Flask, broker_url: str) -> Celery: + """Initialise celery using the redis connection string. + + See https://flask.palletsprojects.com/en/3.0.x/patterns/celery/#integrate-celery-with-flask. 
+ """ + + class FlaskTask(Task): + def __call__(self, *args: object, **kwargs: object) -> object: + with app.app_context(): + return self.run(*args, **kwargs) + + celery_app = Celery(app.name, task_cls=FlaskTask) + celery_app.set_default() + app.extensions["celery"] = celery_app + app.config.from_mapping( + CELERY=dict( + broker_url=broker_url, + result_backend=broker_url, + task_ignore_result=True, + ), + ) + celery_app.config_from_object(app.config["CELERY"]) + return celery_app + + +app = Flask(__name__) +app.config.from_prefixed_env() + +FlaskInstrumentor().instrument_app(app) + +tracer = trace.get_tracer(__name__) + +broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") +# Configure Celery only if Redis is configured +celery_app = celery_init_app(app, broker_url) +redis_client = redis.Redis.from_url(broker_url) if broker_url else None + + +def fib_slow(n): + if n <= 1: + return n + return fib_slow(n - 1) + fib_fast(n - 2) + + +def fib_fast(n): + nth_fib = [0] * (n + 2) + nth_fib[1] = 1 + for i in range(2, n + 1): + nth_fib[i] = nth_fib[i - 1] + nth_fib[i - 2] + return nth_fib[n] + + +@app.route("/fibonacci") +def fibonacci(): + n = int(request.args.get("n", 1)) + with tracer.start_as_current_span("root"): + with tracer.start_as_current_span("fib_slow") as slow_span: + answer = fib_slow(n) + slow_span.set_attribute("n", n) + slow_span.set_attribute("nth_fibonacci", answer) + with tracer.start_as_current_span("fib_fast") as fast_span: + answer = fib_fast(n) + fast_span.set_attribute("n", n) + fast_span.set_attribute("nth_fibonacci", answer) + + return f"F({n}) is: ({answer})" + + +@celery_app.on_after_configure.connect +def setup_periodic_tasks(sender, **kwargs): + """Set up periodic tasks in the scheduler.""" + try: + # This will only have an effect in the beat scheduler. + sender.add_periodic_task(0.5, scheduled_task.s(hostname()), name="every 0.5s") + except NameError as e: + logging.exception("Failed to configure the periodic task") + + +@celery_app.task +def scheduled_task(scheduler_hostname): + """Function to run a schedule task in a worker. + + The worker that will run this task will add the scheduler hostname argument + to the "schedulers" set in Redis, and the worker's hostname to the "workers" + set in Redis. + """ + worker_hostname = hostname() + logging.info( + "scheduler host received %s in worker host %s", scheduler_hostname, worker_hostname + ) + redis_client.sadd("schedulers", scheduler_hostname) + redis_client.sadd("workers", worker_hostname) + logging.info("schedulers: %s", redis_client.smembers("schedulers")) + logging.info("workers: %s", redis_client.smembers("workers")) + # The goal is to have all workers busy in all processes. + # For that it maybe necessary to exhaust all workers, but not to get the pending tasks + # too big, so all schedulers can manage to run their scheduled tasks. + # Celery prefetches tasks, and if they cannot be run they are put in reserved. + # If all processes have tasks in reserved, this task will finish immediately to not make + # queues any longer. 
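+    # celery's inspect().reserved() maps each worker to the tasks it has
+    # prefetched but not yet started; if every worker already holds at least one
+    # reserved task this task returns immediately (delay 0), otherwise it sleeps
+    # for 5 seconds to keep this worker busy for a while.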
+ inspect_obj = celery_app.control.inspect() + reserved_sizes = [len(tasks) for tasks in inspect_obj.reserved().values()] + logging.info("number of reserved tasks %s", reserved_sizes) + delay = 0 if min(reserved_sizes) > 0 else 5 + time.sleep(delay) + + +def get_mysql_database(): + """Get the mysql db connection.""" + if "mysql_db" not in g: + if "MYSQL_DB_CONNECT_STRING" in os.environ: + uri_parts = urlparse(os.environ["MYSQL_DB_CONNECT_STRING"]) + g.mysql_db = pymysql.connect( + host=uri_parts.hostname, + user=uri_parts.username, + password=uri_parts.password, + database=uri_parts.path[1:], + port=uri_parts.port, + ) + else: + return None + return g.mysql_db + + +def get_postgresql_database(): + """Get the postgresql db connection.""" + if "postgresql_db" not in g: + if "POSTGRESQL_DB_CONNECT_STRING" in os.environ: + g.postgresql_db = psycopg.connect( + conninfo=os.environ["POSTGRESQL_DB_CONNECT_STRING"], + ) + else: + return None + return g.postgresql_db + + +def get_mongodb_database() -> pymongo.database.Database | None: + """Get the mongodb db connection.""" + if "mongodb_db" not in g: + if "MONGODB_DB_CONNECT_STRING" in os.environ: + uri = os.environ["MONGODB_DB_CONNECT_STRING"] + client = pymongo.MongoClient(uri) + db = urllib.parse.urlparse(uri).path.removeprefix("/") + g.mongodb_db = client.get_database(db) + else: + return None + return g.mongodb_db + + +def get_redis_database() -> redis.Redis | None: + if "redis_db" not in g: + if "REDIS_DB_CONNECT_STRING" in os.environ: + uri = os.environ["REDIS_DB_CONNECT_STRING"] + g.redis_db = redis.Redis.from_url(uri) + else: + return None + return g.redis_db + + +def get_rabbitmq_connection() -> pika.BlockingConnection | None: + """Get rabbitmq connection.""" + if "rabbitmq" not in g: + if "RABBITMQ_HOSTNAME" in os.environ: + username = os.environ["RABBITMQ_USERNAME"] + password = os.environ["RABBITMQ_PASSWORD"] + hostname = os.environ["RABBITMQ_HOSTNAME"] + vhost = os.environ["RABBITMQ_VHOST"] + port = os.environ["RABBITMQ_PORT"] + credentials = pika.PlainCredentials(username, password) + parameters = pika.ConnectionParameters(hostname, port, vhost, credentials) + g.rabbitmq = pika.BlockingConnection(parameters) + else: + return None + return g.rabbitmq + + +def get_rabbitmq_connection_from_uri() -> pika.BlockingConnection | None: + """Get rabbitmq connection from uri.""" + if "rabbitmq_from_uri" not in g: + if "RABBITMQ_CONNECT_STRING" in os.environ: + uri = os.environ["RABBITMQ_CONNECT_STRING"] + parameters = pika.URLParameters(uri) + g.rabbitmq_from_uri = pika.BlockingConnection(parameters) + else: + return None + return g.rabbitmq_from_uri + + +def get_boto3_client(): + if "boto3_client" not in g: + if "S3_ACCESS_KEY" in os.environ: + s3_client_config = botocore.config.Config( + s3={ + "addressing_style": os.environ["S3_ADDRESSING_STYLE"], + }, + # no_proxy env variable is not read by boto3, so + # this is needed for the tests to avoid hitting the proxy. 
+ proxies={}, + ) + g.boto3_client = boto3.client( + "s3", + os.environ["S3_REGION"], + aws_access_key_id=os.environ["S3_ACCESS_KEY"], + aws_secret_access_key=os.environ["S3_SECRET_KEY"], + endpoint_url=os.environ["S3_ENDPOINT"], + use_ssl=False, + config=s3_client_config, + ) + else: + return None + return g.boto3_client + + +@app.teardown_appcontext +def teardown_database(_): + """Tear down databases connections.""" + mysql_db = g.pop("mysql_db", None) + if mysql_db is not None: + mysql_db.close() + postgresql_db = g.pop("postgresql_db", None) + if postgresql_db is not None: + postgresql_db.close() + mongodb_db = g.pop("mongodb_db", None) + if mongodb_db is not None: + mongodb_db.client.close() + boto3_client = g.pop("boto3_client", None) + if boto3_client is not None: + boto3_client.close() + rabbitmq = g.pop("rabbitmq", None) + if rabbitmq is not None: + rabbitmq.close() + rabbitmq_from_uri = g.pop("rabbitmq_from_uri", None) + if rabbitmq_from_uri is not None: + rabbitmq_from_uri.close() + + +@app.route("/") +def hello_world(): + return "Hello, World!" + + +@app.route("/sleep") +def sleep(): + duration_seconds = int(request.args.get("duration")) + time.sleep(duration_seconds) + return "" + + +@app.route("/config/") +def config(config_name: str): + return jsonify(app.config.get(config_name)) + + +@app.route("/mysql/status") +def mysql_status(): + """Mysql status endpoint.""" + if database := get_mysql_database(): + with database.cursor() as cursor: + sql = "SELECT version()" + cursor.execute(sql) + cursor.fetchone() + return "SUCCESS" + return "FAIL" + + +@app.route("/s3/status") +def s3_status(): + """S3 status endpoint.""" + if client := get_boto3_client(): + bucket_name = os.environ["S3_BUCKET"] + objectsresponse = client.list_objects(Bucket=bucket_name) + return "SUCCESS" + return "FAIL" + + +@app.route("/postgresql/status") +def postgresql_status(): + """Postgresql status endpoint.""" + if database := get_postgresql_database(): + with database.cursor() as cursor: + sql = "SELECT version()" + cursor.execute(sql) + cursor.fetchone() + return "SUCCESS" + return "FAIL" + + +@app.route("/mongodb/status") +def mongodb_status(): + """Mongodb status endpoint.""" + if (database := get_mongodb_database()) is not None: + database.list_collection_names() + return "SUCCESS" + return "FAIL" + + +@app.route("/redis/status") +def redis_status(): + """Redis status endpoint.""" + if database := get_redis_database(): + try: + database.set("foo", "bar") + return "SUCCESS" + except redis.exceptions.RedisError: + logging.exception("Error querying redis") + return "FAIL" + + +@app.route("/redis/clear_celery_stats") +def redis_celery_clear_stats(): + """Reset Redis statistics about workers and schedulers.""" + if database := get_redis_database(): + try: + database.delete("workers") + database.delete("schedulers") + return "SUCCESS" + except redis.exceptions.RedisError: + logging.exception("Error querying redis") + return "FAIL", 500 + + +@app.route("/redis/celery_stats") +def redis_celery_stats(): + """Read Redis statistics about workers and schedulers.""" + if database := get_redis_database(): + try: + worker_set = [str(host) for host in database.smembers("workers")] + beat_set = [str(host) for host in database.smembers("schedulers")] + return jsonify({"workers": worker_set, "schedulers": beat_set}) + except redis.exceptions.RedisError: + logging.exception("Error querying redis") + return "FAIL", 500 + + +@app.route("/rabbitmq/send") +def rabbitmq_send(): + """Send a message to "charm" queue.""" + if 
connection := get_rabbitmq_connection(): + channel = connection.channel() + channel.queue_declare(queue="charm") + channel.basic_publish(exchange="", routing_key="charm", body="SUCCESS") + return "SUCCESS" + return "FAIL" + + +@app.route("/rabbitmq/receive") +def rabbitmq_receive(): + """Receive a message from "charm" queue in blocking form.""" + if connection := get_rabbitmq_connection_from_uri(): + channel = connection.channel() + method_frame, _header_frame, body = channel.basic_get("charm") + if method_frame: + channel.basic_ack(method_frame.delivery_tag) + if body == b"SUCCESS": + return "SUCCESS" + return "FAIL. INCORRECT MESSAGE." + return "FAIL. NO MESSAGE." + return "FAIL. NO CONNECTION." + + +@app.route("/env") +def get_env(): + """Return environment variables""" + return jsonify(dict(os.environ)) diff --git a/examples/flask/test_tracing_rock/requirements.txt b/examples/flask/test_tracing_rock/requirements.txt new file mode 100644 index 0000000..9966cad --- /dev/null +++ b/examples/flask/test_tracing_rock/requirements.txt @@ -0,0 +1,18 @@ +Flask +PyMySQL +PyMySQL[rsa] +PyMySQL[ed25519] +psycopg[binary] +pymongo +redis[hiredis] +boto3 +pika +celery +opentelemetry-api +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http +opentelemetry-instrumentation +opentelemetry-instrumentation-flask +opentelemetry-instrumentation-wsgi +opentelemetry-sdk +opentelemetry-distro diff --git a/examples/flask/test_tracing_rock/rockcraft.yaml b/examples/flask/test_tracing_rock/rockcraft.yaml new file mode 100644 index 0000000..ec722b8 --- /dev/null +++ b/examples/flask/test_tracing_rock/rockcraft.yaml @@ -0,0 +1,27 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +name: test-tracing-flask +summary: A flask test app +description: OCI image for the test flask app +version: "0.1" +base: ubuntu@22.04 +license: Apache-2.0 +platforms: + amd64: + +extensions: + - flask-framework + +services: + celery-worker: + override: replace + command: celery -A app:celery_app worker -c 2 --loglevel DEBUG + startup: enabled + user: _daemon_ + working-dir: /flask/app + celery-beat-scheduler: + override: replace + command: celery -A app:celery_app beat --loglevel DEBUG -s /tmp/celerybeat-schedule + startup: enabled + user: _daemon_ + working-dir: /flask/app diff --git a/examples/go/charm/charmcraft.yaml b/examples/go/charm/charmcraft.yaml index 01e6585..06bd01a 100644 --- a/examples/go/charm/charmcraft.yaml +++ b/examples/go/charm/charmcraft.yaml @@ -70,6 +70,10 @@ requires: interface: postgresql_client optional: True limit: 1 + tracing: + interface: tracing + optional: True + limit: 1 resources: app-image: description: go application image. diff --git a/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py new file mode 100644 index 0000000..1dd78b5 --- /dev/null +++ b/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -0,0 +1,1000 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +"""## Overview. + +This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a +tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm +may maintain the same interface and be backward compatible with all currently integrated charms. + +## Requirer Library Usage + +Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` +object from this charm library. 
For the simplest use cases, using the `TracingEndpointRequirer` +object only requires instantiating it, typically in the constructor of your charm. The +`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint + is exposed by the Tempo charm, and a list of protocols it intends to send traces with. + This relation must use the `tracing` interface. + The `TracingEndpointRequirer` object may be instantiated as follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointRequirer(self, + protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] + ) + # ... + +Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the +parent charm. + +Alternatively to providing the list of requested protocols at init time, the charm can do it at +any point in time by calling the +`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. +Using this method also allows you to use per-relation protocols. + +Units of requirer charms obtain the tempo endpoint to which they will push their traces by calling +`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: +- `otlp_grpc` +- `otlp_http` +- `zipkin` +- `tempo` + +If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, +the library will raise an error. + +We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests +go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. + +## Provider Library Usage + +The `TracingEndpointProvider` object may be used by charms to manage relations with their +trace sources. For this purposes a Tempo-like charm needs to do two things + +1. Instantiate the `TracingEndpointProvider` object by providing it a +reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm +uses to interact with its trace sources. This relation must conform to the `tracing` interface +and it is strongly recommended that this relation be named `tracing` which is its +default value. + +For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as +follows + + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointProvider + + def __init__(self, *args): + super().__init__(*args) + # ... + self.tracing = TracingEndpointProvider(self) + # ... 
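+
+2. Call `TracingEndpointProvider.publish_receivers` (leader only) whenever the set
+of requested receivers changes, for example in response to the provider's `request`
+event, passing a sequence of (protocol, url) pairs for every receiver that is
+active, so that all related requirer charms are told where to send their traces.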
+ + + +""" # noqa: W505 +import enum +import json +import logging +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Literal, + MutableMapping, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +import pydantic +from ops.charm import CharmBase, CharmEvents, RelationBrokenEvent, RelationEvent, RelationRole +from ops.framework import EventSource, Object +from ops.model import ModelError, Relation +from pydantic import BaseModel, Field + +# The unique Charmhub library identifier, never change it +LIBID = "d2f02b1f8d1244b5989fd55bc3a28943" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 4 + +PYDEPS = ["pydantic"] + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "tracing" +RELATION_INTERFACE_NAME = "tracing" + +# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 +ReceiverProtocol = Literal[ + "zipkin", + "otlp_grpc", + "otlp_http", + "jaeger_grpc", + "jaeger_thrift_http", +] + +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + +BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} + + +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { + "zipkin": TransportProtocolType.http, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, + "jaeger_thrift_http": TransportProtocolType.http, + "jaeger_grpc": TransportProtocolType.grpc, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. +""" + + +class TracingError(Exception): + """Base class for custom errors raised by this library.""" + + +class NotReadyError(TracingError): + """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" + + +class ProtocolNotRequestedError(TracingError): + """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" + + +class DataValidationError(TracingError): + """Raised when data validation fails on IPU relation data.""" + + +class AmbiguousRelationUsageError(TracingError): + """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" + + +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class DatabagModel(BaseModel): # type: ignore + """Base databag model.""" + + class Config: + """Pydantic config.""" + + # ignore any extra fields in the databag + extra = "ignore" + """Ignore any extra fields in the databag.""" + allow_population_by_field_name = True + """Allow instantiating this class by field name (instead of forcing alias).""" + + _NEST_UNDER = None + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + if cls._NEST_UNDER: + return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {f.alias for f in cls.__fields__.values()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. 
{databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.parse_raw(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + + if self._NEST_UNDER: + databag[self._NEST_UNDER] = self.json(by_alias=True) + return databag + + dct = self.dict() + for key, field in self.__fields__.items(): # type: ignore + value = dct[key] + databag[field.alias or key] = json.dumps(value) + + return databag + +else: + from pydantic import ConfigDict + + class DatabagModel(BaseModel): + """Base databag model.""" + + model_config = ConfigDict( + # ignore any extra fields in the databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, # type: ignore + ) + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping): + """Load this model from a Juju databag.""" + nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) # type: ignore + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.__fields__.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) # type: ignore + except pydantic.ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): + """Write the contents of this model to Juju databag. + + :param databag: the databag to write the data to. + :param clear: ensure the databag is cleared before writing it. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( # type: ignore + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump() # type: ignore + for key, field in self.model_fields.items(): # type: ignore + value = dct[key] + if value == field.default: + continue + databag[field.alias or key] = json.dumps(value) + + return databag + + +# todo use models from charm-relation-interfaces +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" + + class Config: + """Pydantic config.""" + + use_enum_values = True + """Allow serializing enum values.""" + + name: str = Field( + ..., + description="Receiver protocol name. 
What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( # type: ignore + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + + +class Receiver(BaseModel): + """Specification of an active receiver.""" + + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. + If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" + + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) + + +class TracingRequirerAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing requirer.""" + + receivers: List[ReceiverProtocol] + """Requested receivers.""" + + +class _AutoSnapshotEvent(RelationEvent): + __args__: Tuple[str, ...] 
= () + __optional_kwargs__: Dict[str, Any] = {} + + @classmethod + def __attrs__(cls): + return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) + + def __init__(self, handle, relation, *args, **kwargs): + super().__init__(handle, relation) + + if not len(self.__args__) == len(args): + raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) + + for attr, obj in zip(self.__args__, args): + setattr(self, attr, obj) + for attr, default in self.__optional_kwargs__.items(): + obj = kwargs.get(attr, default) + setattr(self, attr, obj) + + def snapshot(self) -> dict: + dct = super().snapshot() + for attr in self.__attrs__(): + obj = getattr(self, attr) + try: + dct[attr] = obj + except ValueError as e: + raise ValueError( + "cannot automagically serialize {}: " + "override this method and do it " + "manually.".format(obj) + ) from e + + return dct + + def restore(self, snapshot: dict) -> None: + super().restore(snapshot) + for attr, obj in snapshot.items(): + setattr(self, attr, obj) + + +class RelationNotFoundError(Exception): + """Raised if no relation with the given name is found.""" + + def __init__(self, relation_name: str): + self.relation_name = relation_name + self.message = "No relation named '{}' found".format(relation_name) + super().__init__(self.message) + + +class RelationInterfaceMismatchError(Exception): + """Raised if the relation with the given name has an unexpected interface.""" + + def __init__( + self, + relation_name: str, + expected_relation_interface: str, + actual_relation_interface: str, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_interface + self.actual_relation_interface = actual_relation_interface + self.message = ( + "The '{}' relation has '{}' as interface rather than the expected '{}'".format( + relation_name, actual_relation_interface, expected_relation_interface + ) + ) + + super().__init__(self.message) + + +class RelationRoleMismatchError(Exception): + """Raised if the relation with the given name has a different role than expected.""" + + def __init__( + self, + relation_name: str, + expected_relation_role: RelationRole, + actual_relation_role: RelationRole, + ): + self.relation_name = relation_name + self.expected_relation_interface = expected_relation_role + self.actual_relation_role = actual_relation_role + self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( + relation_name, repr(actual_relation_role), repr(expected_relation_role) + ) + + super().__init__(self.message) + + +def _validate_relation_by_interface_and_direction( + charm: CharmBase, + relation_name: str, + expected_relation_interface: str, + expected_relation_role: RelationRole, +): + """Validate a relation. + + Verifies that the `relation_name` provided: (1) exists in metadata.yaml, + (2) declares as interface the interface name passed as `relation_interface` + and (3) has the right "direction", i.e., it is a relation that `charm` + provides or requires. + + Args: + charm: a `CharmBase` object to scan for the matching relation. + relation_name: the name of the relation to be verified. + expected_relation_interface: the interface name to be matched by the + relation named `relation_name`. + expected_relation_role: whether the `relation_name` must be either + provided or required by `charm`. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. 
+ RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the same relation interface + as specified via the `expected_relation_interface` argument. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the same role as specified + via the `expected_relation_role` argument. + """ + if relation_name not in charm.meta.relations: + raise RelationNotFoundError(relation_name) + + relation = charm.meta.relations[relation_name] + + # fixme: why do we need to cast here? + actual_relation_interface = cast(str, relation.interface_name) + + if actual_relation_interface != expected_relation_interface: + raise RelationInterfaceMismatchError( + relation_name, expected_relation_interface, actual_relation_interface + ) + + if expected_relation_role is RelationRole.provides: + if relation_name not in charm.meta.provides: + raise RelationRoleMismatchError( + relation_name, RelationRole.provides, RelationRole.requires + ) + elif expected_relation_role is RelationRole.requires: + if relation_name not in charm.meta.requires: + raise RelationRoleMismatchError( + relation_name, RelationRole.requires, RelationRole.provides + ) + else: + raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) + + +class RequestEvent(RelationEvent): + """Event emitted when a remote requests a tracing endpoint.""" + + @property + def requested_receivers(self) -> List[ReceiverProtocol]: + """List of receiver protocols that have been requested.""" + relation = self.relation + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + return TracingRequirerAppData.load(relation.data[app]).receivers + + +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + +class TracingEndpointProviderEvents(CharmEvents): + """TracingEndpointProvider events.""" + + request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) + + +class TracingEndpointProvider(Object): + """Class representing a trace receiver service.""" + + on = TracingEndpointProviderEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + external_url: Optional[str] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ): + """Initialize. + + Args: + charm: a `CharmBase` instance that manages this instance of the Tempo service. + external_url: external address of the node hosting the tempo server, + if an ingress is present. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.requires` + role. 
+ """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides + ) + + super().__init__(charm, relation_name + "tracing-provider") + self._charm = charm + self._external_url = external_url + self._relation_name = relation_name + self.framework.observe( + self._charm.on[relation_name].relation_joined, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_created, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_changed, self._on_relation_event + ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) + + def _on_relation_event(self, e: RelationEvent): + """Handle relation created/joined/changed events.""" + if self.is_requirer_ready(e.relation): + self.on.request.emit(e.relation) + + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" + try: + self._get_requested_protocols(relation) + except NotReadyError: + return False + return True + + @staticmethod + def _get_requested_protocols(relation: Relation): + app = relation.app + if not app: + raise NotReadyError("relation.app is None") + + try: + databag = TracingRequirerAppData.load(relation.data[app]) + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"relation {relation} is not ready to talk tracing") + raise NotReadyError() + return databag.receivers + + def requested_protocols(self): + """All receiver protocols that have been requested by our related apps.""" + requested_protocols = set() + for relation in self.relations: + try: + protocols = self._get_requested_protocols(relation) + except NotReadyError: + continue + requested_protocols.update(protocols) + return requested_protocols + + @property + def relations(self) -> List[Relation]: + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] + + def publish_receivers(self, receivers: Sequence[RawReceiver]): + """Let all requirers know that these receivers are active and listening.""" + if not self._charm.unit.is_leader(): + raise RuntimeError("only leader can do this") + + for relation in self.relations: + try: + TracingProviderAppData( + receivers=[ + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers + ], + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to update_relation_data." + f"The relation must be gone." 
+ ) + continue + raise + + +class EndpointRemovedEvent(RelationBrokenEvent): + """Event representing a change in one of the receiver endpoints.""" + + +class EndpointChangedEvent(_AutoSnapshotEvent): + """Event representing a change in one of the receiver endpoints.""" + + __args__ = ("_receivers",) + + if TYPE_CHECKING: + _receivers = [] # type: List[dict] + + @property + def receivers(self) -> List[Receiver]: + """Cast receivers back from dict.""" + return [Receiver(**i) for i in self._receivers] + + +class TracingEndpointRequirerEvents(CharmEvents): + """TracingEndpointRequirer events.""" + + endpoint_changed = EventSource(EndpointChangedEvent) + endpoint_removed = EventSource(EndpointRemovedEvent) + + +class TracingEndpointRequirer(Object): + """A tracing endpoint for Tempo.""" + + on = TracingEndpointRequirerEvents() # type: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + protocols: Optional[List[ReceiverProtocol]] = None, + ): + """Construct a tracing requirer for a Tempo charm. + + If your application supports pushing traces to a distributed tracing backend, the + `TracingEndpointRequirer` object enables your charm to easily access endpoint information + exchanged over a `tracing` relation interface. + + Args: + charm: a `CharmBase` object that manages this + `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating + class. + relation_name: an optional string name of the relation between `charm` + and the Tempo charmed service. The default is "tracing". It is strongly + advised not to change the default, so that people deploying your charm will have a + consistent experience with all other charms that provide tracing endpoints. + protocols: optional list of protocols that the charm intends to send traces with. + The provider will enable receivers for these and only these protocols, + so be sure to enable all protocols the charm or its workload are going to need. + + Raises: + RelationNotFoundError: If there is no relation in the charm's metadata.yaml + with the same name as provided via `relation_name` argument. + RelationInterfaceMismatchError: The relation with the same name as provided + via `relation_name` argument does not have the `tracing` relation + interface. + RelationRoleMismatchError: If the relation with the same name as provided + via `relation_name` argument does not have the `RelationRole.provides` + role. + """ + _validate_relation_by_interface_and_direction( + charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires + ) + + super().__init__(charm, relation_name) + + self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[self._relation_name] + self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) + self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) + + if protocols: + self.request_protocols(protocols) + + def request_protocols( + self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None + ): + """Publish the list of protocols which the provider should activate.""" + # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? + relations = [relation] if relation else self.relations + + if not protocols: + # empty sequence + raise ValueError( + "You need to pass a nonempty sequence of protocols to `request_protocols`." 
+ ) + + try: + if self._charm.unit.is_leader(): + for relation in relations: + TracingRequirerAppData( + receivers=list(protocols), + ).dump(relation.data[self._charm.app]) + + except ModelError as e: + # args are bytes + msg = e.args[0] + if isinstance(msg, bytes): + if msg.startswith( + b"ERROR cannot read relation application settings: permission denied" + ): + logger.error( + f"encountered error {e} while attempting to request_protocols." + f"The relation must be gone." + ) + return + raise + + @property + def relations(self) -> List[Relation]: + """The tracing relations associated with this endpoint.""" + return self._charm.model.relations[self._relation_name] + + @property + def _relation(self) -> Optional[Relation]: + """If this wraps a single endpoint, the relation bound to it, if any.""" + if not self._is_single_endpoint: + objname = type(self).__name__ + raise AmbiguousRelationUsageError( + f"This {objname} wraps a {self._relation_name} endpoint that has " + "limit != 1. We can't determine what relation, of the possibly many, you are " + f"talking about. Please pass a relation instance while calling {objname}, " + "or set limit=1 in the charm metadata." + ) + relations = self.relations + return relations[0] if relations else None + + def is_ready(self, relation: Optional[Relation] = None): + """Is this endpoint ready?""" + relation = relation or self._relation + if not relation: + logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") + return False + if relation.data is None: + logger.error(f"relation data is None for {relation}") + return False + if not relation.app: + logger.error(f"{relation} event received but there is no relation.app") + return False + try: + databag = dict(relation.data[relation.app]) + TracingProviderAppData.load(databag) + + except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): + logger.info(f"failed validating relation data for {relation}") + return False + return True + + def _on_tracing_relation_changed(self, event): + """Notify the providers that there is new endpoint information available.""" + relation = event.relation + if not self.is_ready(relation): + self.on.endpoint_removed.emit(relation) # type: ignore + return + + data = TracingProviderAppData.load(relation.data[relation.app]) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore + + def _on_tracing_relation_broken(self, event: RelationBrokenEvent): + """Notify the providers that the endpoint is broken.""" + relation = event.relation + self.on.endpoint_removed.emit(relation) # type: ignore + + def get_all_endpoints( + self, relation: Optional[Relation] = None + ) -> Optional[TracingProviderAppData]: + """Unmarshalled relation data.""" + relation = relation or self._relation + if not self.is_ready(relation): + return + return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore + + def _get_endpoint( + self, relation: Optional[Relation], protocol: ReceiverProtocol + ) -> Optional[str]: + app_data = self.get_all_endpoints(relation) + if not app_data: + return None + receivers: List[Receiver] = list( + filter(lambda i: i.protocol.name == protocol, app_data.receivers) + ) + if not receivers: + # it can happen if the charm requests tracing protocols, but the relay (such as grafana-agent) isn't yet + # connected to the tracing backend. 
In this case, it's not an error the charm author can do anything about + logger.warning(f"no receiver found with protocol={protocol!r}.") + return + if len(receivers) > 1: + # if we have more than 1 receiver that matches, it shouldn't matter which receiver we'll be using. + logger.warning( + f"too many receivers with protocol={protocol!r}; using first one. Found: {receivers}" + ) + + receiver = receivers[0] + return receiver.url + + def get_endpoint( + self, protocol: ReceiverProtocol, relation: Optional[Relation] = None + ) -> Optional[str]: + """Receiver endpoint for the given protocol. + + It could happen that this function gets called before the provider publishes the endpoints. + In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to + restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. + + Raises: + ProtocolNotRequestedError: + If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. + """ + endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) + if not endpoint: + requested_protocols = set() + relations = [relation] if relation else self.relations + for relation in relations: + try: + databag = TracingRequirerAppData.load(relation.data[self._charm.app]) + except DataValidationError: + continue + + requested_protocols.update(databag.receivers) + + if protocol not in requested_protocols: + raise ProtocolNotRequestedError(protocol, relation) + + return None + return endpoint + + +def charm_tracing_config( + endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] +) -> Tuple[Optional[str], Optional[str]]: + """Return the charm_tracing config you likely want. + + If no endpoint is provided: + disable charm tracing. + If https endpoint is provided but cert_path is not found on disk: + disable charm tracing. + If https endpoint is provided and cert_path is None: + ERROR + Else: + proceed with charm tracing (with or without tls, as appropriate) + + Usage: + If you are using charm_tracing >= v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self.my_endpoint, self.cert_path = charm_tracing_config( + ... self.tracing, self._cert_path) + + If you are using charm_tracing < v1.9: + >>> from lib.charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from lib.charms.tempo_coordinator_k8s.v0.tracing import charm_tracing_config + >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") + >>> class MyCharm(...): + >>> _cert_path = "/path/to/cert/on/charm/container.crt" + >>> def __init__(self, ...): + >>> self.tracing = TracingEndpointRequirer(...) + >>> self._my_endpoint, self._cert_path = charm_tracing_config( + ... 
self.tracing, self._cert_path) + >>> @property + >>> def my_endpoint(self): + >>> return self._my_endpoint + >>> @property + >>> def cert_path(self): + >>> return self._cert_path + + """ + if not endpoint_requirer.is_ready(): + return None, None + + endpoint = endpoint_requirer.get_endpoint("otlp_http") + if not endpoint: + return None, None + + is_https = endpoint.startswith("https://") + + if is_https: + if cert_path is None or not Path(cert_path).exists(): + # disable charm tracing until we obtain a cert to prevent tls errors + logger.error( + "Tracing endpoint is https, but no server_cert has been passed." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). " + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " + ) + return None, None + return endpoint, str(cert_path) + else: + return endpoint, None diff --git a/examples/go/go.mod b/examples/go/go_app/go.mod similarity index 100% rename from examples/go/go.mod rename to examples/go/go_app/go.mod diff --git a/examples/go/go.sum b/examples/go/go_app/go.sum similarity index 100% rename from examples/go/go.sum rename to examples/go/go_app/go.sum diff --git a/examples/go/internal/service/service.go b/examples/go/go_app/internal/service/service.go similarity index 100% rename from examples/go/internal/service/service.go rename to examples/go/go_app/internal/service/service.go diff --git a/examples/go/main.go b/examples/go/go_app/main.go similarity index 100% rename from examples/go/main.go rename to examples/go/go_app/main.go diff --git a/examples/go/migrate.sh b/examples/go/go_app/migrate.sh similarity index 100% rename from examples/go/migrate.sh rename to examples/go/go_app/migrate.sh diff --git a/examples/go/rockcraft.yaml b/examples/go/go_app/rockcraft.yaml similarity index 100% rename from examples/go/rockcraft.yaml rename to examples/go/go_app/rockcraft.yaml diff --git a/examples/go/go_tracing_app/go.mod b/examples/go/go_tracing_app/go.mod new file mode 100644 index 0000000..2a01308 --- /dev/null +++ b/examples/go/go_tracing_app/go.mod @@ -0,0 +1,46 @@ +// Copyright 2024 Canonical Ltd. +// See LICENSE file for licensing details. 
+module go-tracing-app + +go 1.22.7 + +toolchain go1.22.10 + +require ( + github.com/jackc/pgx/v5 v5.6.0 + github.com/prometheus/client_golang v1.19.1 +) + +require ( + github.com/beorn7/perks v1.0.1 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/prometheus/client_model v0.5.0 // indirect + github.com/prometheus/common v0.48.0 // indirect + github.com/prometheus/procfs v0.12.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel v1.33.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0 // indirect + go.opentelemetry.io/otel/metric v1.33.0 // indirect + go.opentelemetry.io/otel/sdk v1.33.0 // indirect + go.opentelemetry.io/otel/trace v1.33.0 // indirect + go.opentelemetry.io/proto/otlp v1.4.0 // indirect + golang.org/x/crypto v0.30.0 // indirect + golang.org/x/net v0.32.0 // indirect + golang.org/x/sync v0.10.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 // indirect + google.golang.org/grpc v1.68.1 // indirect + google.golang.org/protobuf v1.35.2 // indirect +) diff --git a/examples/go/go_tracing_app/go.sum b/examples/go/go_tracing_app/go.sum new file mode 100644 index 0000000..d670637 --- /dev/null +++ b/examples/go/go_tracing_app/go.sum @@ -0,0 +1,96 @@ +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= 
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 h1:TmHmbvxPmaegwhDubVz0lICL0J5Ka2vwTzhoePEXsGE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0/go.mod h1:qztMSjm835F2bXf+5HKAPIS5qsmQDqZna/PgVt4rWtI= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= +github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= +github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= +github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSzKKE= +github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= +go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0 
h1:W5AWUn/IVe8RFb5pZx1Uh9Laf/4+Qmm4kJL5zPuvR+0= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0/go.mod h1:mzKxJywMNBdEX8TSJais3NnsVZUaJ+bAy6UxPTng2vk= +go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= +go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= +go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM= +go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM= +go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= +go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= +go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= +go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY= +golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI= +golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 h1:CkkIfIt50+lT6NHAVoRYEyAvQGFM7xEwXUUywFvEb3Q= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576/go.mod h1:1R3kvZ1dtP3+4p4d3G8uJ8rFk/fWlScl38vanWACI08= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 h1:8ZmaLZE4XWrtU3MyClkYqqtl6Oegr3235h7jxsDyqCY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576/go.mod h1:5uTbfoYQed2U9p3KIj2/Zzm02PYhndfdmML0qC3q3FU= +google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0= +google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/examples/go/go_tracing_app/internal/service/service.go b/examples/go/go_tracing_app/internal/service/service.go new file mode 100644 index 0000000..e01b557 --- /dev/null +++ b/examples/go/go_tracing_app/internal/service/service.go @@ -0,0 +1,61 @@ +// Copyright 2024 Canonical Ltd. +// See LICENSE file for licensing details. + +package service + +import ( + "database/sql" + "log" + + "context" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" +) + +var lemonsKey = attribute.Key("ex.com/lemons") + +// SubOperation is an example to demonstrate the use of named tracer. +// It creates a named tracer with its package path. +func SubOperation(ctx context.Context) error { + // Using global provider. Alternative is to have application provide a getter + // for its component to get the instance of the provider. + tr := otel.Tracer("example.com/go-tracing-app") + + var span trace.Span + _, span = tr.Start(ctx, "Sub operation...") + defer span.End() + span.SetAttributes(lemonsKey.String("five")) + span.AddEvent("Sub span event") + + return nil +} +type Service struct { + PostgresqlURL string +} + +func (s *Service) CheckPostgresqlMigrateStatus() (err error) { + db, err := sql.Open("pgx", s.PostgresqlURL) + if err != nil { + return + } + defer db.Close() + + var version string + err = db.QueryRow("SELECT version()").Scan(&version) + if err != nil { + return + } + log.Printf("postgresql version %s.", version) + + var numUsers int + // This will fail if the table does not exist. + err = db.QueryRow("SELECT count(*) from USERS").Scan(&numUsers) + if err != nil { + return + } + log.Printf("Number of users in Postgresql %d.", numUsers) + + return +} diff --git a/examples/go/go_tracing_app/main.go b/examples/go/go_tracing_app/main.go new file mode 100644 index 0000000..1852985 --- /dev/null +++ b/examples/go/go_tracing_app/main.go @@ -0,0 +1,181 @@ +// Copyright 2024 Canonical Ltd. +// See LICENSE file for licensing details. 
+ +package main + +import ( + "context" + "errors" + "fmt" + "go-tracing-app/internal/service" + "io" + "log" + "os" + "os/signal" + "syscall" + "time" + + "encoding/json" + "net/http" + + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + "go.opentelemetry.io/otel/trace" +) + +type mainHandler struct { + counter prometheus.Counter + service service.Service +} + +func (h mainHandler) serveHelloWorld(w http.ResponseWriter, r *http.Request) { + h.counter.Inc() + log.Printf("Counter %#v\n", h.counter) + fmt.Fprintf(w, "Hello, World!") +} + +func (h mainHandler) serveUserDefinedConfig(w http.ResponseWriter, r *http.Request) { + h.counter.Inc() + + w.Header().Set("Content-Type", "application/json") + + user_defined_config, found := os.LookupEnv("APP_USER_DEFINED_CONFIG") + if !found { + json.NewEncoder(w).Encode(nil) + return + } + json.NewEncoder(w).Encode(user_defined_config) +} + +func (h mainHandler) servePostgresql(w http.ResponseWriter, r *http.Request) { + err := h.service.CheckPostgresqlMigrateStatus() + if err != nil { + log.Printf(err.Error()) + io.WriteString(w, "FAILURE") + return + } else { + io.WriteString(w, "SUCCESS") + } +} + + +var ( + fooKey = attribute.Key("ex.com/foo") + barKey = attribute.Key("ex.com/bar") + anotherKey = attribute.Key("ex.com/another") +) + +var tp *sdktrace.TracerProvider + +// initTracer creates and registers trace provider instance. +func initTracer(ctx context.Context) error { + exp, err := otlptracehttp.New(ctx) + if err != nil { + return fmt.Errorf("failed to initialize stdouttrace exporter: %w", err) + } + bsp := sdktrace.NewBatchSpanProcessor(exp) + tp = sdktrace.NewTracerProvider( + sdktrace.WithSampler(sdktrace.AlwaysSample()), + sdktrace.WithSpanProcessor(bsp), + ) + otel.SetTracerProvider(tp) + return nil +} + +func main() { + ctx := context.Background() + // initialize trace provider. + if err := initTracer(ctx); err != nil { + log.Panic(err) + } + + // Create a named tracer with package path as its name. 
+ tracer := tp.Tracer("example.com/go-tracing-app") + defer func() { _ = tp.Shutdown(ctx) }() + + + var span trace.Span + ctx, span = tracer.Start(ctx, "operation") + defer span.End() + span.AddEvent("Nice operation!", trace.WithAttributes(attribute.Int("bogons", 100))) + span.SetAttributes(anotherKey.String("yes")) + if err := service.SubOperation(ctx); err != nil { + panic(err) + } + metricsPort, found := os.LookupEnv("APP_METRICS_PORT") + if !found { + metricsPort = "8080" + } + metricsPath, found := os.LookupEnv("APP_METRICS_PATH") + if !found { + metricsPath = "/metrics" + } + port, found := os.LookupEnv("APP_PORT") + if !found { + port = "8080" + } + + requestCounter := prometheus.NewCounter( + prometheus.CounterOpts{ + Name: "request_count", + Help: "No of request handled", + }) + postgresqlURL := os.Getenv("POSTGRESQL_DB_CONNECT_STRING") + + mux := http.NewServeMux() + mainHandler := mainHandler{ + counter: requestCounter, + service: service.Service{PostgresqlURL: postgresqlURL}, + } + mux.HandleFunc("/", mainHandler.serveHelloWorld) + mux.HandleFunc("/env/user-defined-config", mainHandler.serveUserDefinedConfig) + mux.HandleFunc("/postgresql/migratestatus", mainHandler.servePostgresql) + + if metricsPort != port { + prometheus.MustRegister(requestCounter) + + prometheusMux := http.NewServeMux() + prometheusMux.Handle(metricsPath, promhttp.Handler()) + prometheusServer := &http.Server{ + Addr: ":" + metricsPort, + Handler: prometheusMux, + } + go func() { + if err := prometheusServer.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { + log.Fatalf("Prometheus HTTP server error: %v", err) + } + log.Println("Prometheus HTTP Stopped serving new connections.") + }() + } else { + mux.Handle("/metrics", promhttp.Handler()) + } + + server := &http.Server{ + Addr: ":" + port, + Handler: mux, + } + go func() { + if err := server.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { + log.Fatalf("HTTP server error: %v", err) + } + log.Println("Stopped serving new connections.") + }() + + sigChan := make(chan os.Signal, 1) + signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM) + <-sigChan + + shutdownCtx, shutdownRelease := context.WithTimeout(context.Background(), 10*time.Second) + defer shutdownRelease() + + if err := server.Shutdown(shutdownCtx); err != nil { + log.Fatalf("HTTP shutdown error: %v", err) + } + log.Println("Graceful shutdown complete.") +} diff --git a/examples/go/go_tracing_app/migrate.sh b/examples/go/go_tracing_app/migrate.sh new file mode 100755 index 0000000..5a0f850 --- /dev/null +++ b/examples/go/go_tracing_app/migrate.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +PGPASSWORD="${POSTGRESQL_DB_PASSWORD}" psql -h "${POSTGRESQL_DB_HOSTNAME}" -U "${POSTGRESQL_DB_USERNAME}" "${POSTGRESQL_DB_NAME}" -c "CREATE TABLE IF NOT EXISTS USERS(NAME CHAR(50));" diff --git a/examples/go/go_tracing_app/rockcraft.yaml b/examples/go/go_tracing_app/rockcraft.yaml new file mode 100644 index 0000000..b9052c8 --- /dev/null +++ b/examples/go/go_tracing_app/rockcraft.yaml @@ -0,0 +1,60 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +name: go-tracing-app +# see https://documentation.ubuntu.com/rockcraft/en/stable/explanation/bases/ +# for more information about bases and using 'bare' bases for chiselled rocks +base: ubuntu@24.04 # the base environment for this Go application +version: '0.1' # just for humans. 
Semantic versioning is recommended +summary: A summary of your Go application # 79 char long summary +description: | + This is go-tracing-app's description. You have a paragraph or two to tell the + most important story about it. Keep it under 100 words though, + we live in tweetspace and your description wants to look good in the + container registries out there. +platforms: # The platforms this rock should be built on and run on + amd64: + +# to ensure the go-framework extension functions properly, your Go project +# should have a go.mod file. Check the parts section for the selection of +# the default binary. +# see https://documentation.ubuntu.com/rockcraft/en/stable/reference/extensions/go-framework +# for more information. +# +-- go_app +# | |-- go.mod +# | |-- migrate.sh + +extensions: + - go-framework + +# Uncomment the sections you need and adjust according to your requirements. +parts: +# go-framework/install-app: +# # Select a specific Go version. Otherwise the current stable one will be used. +# build-snaps: +# - go/1.22/stable +# organize: +# # If the main package is in the base directory and the rockcraft name +# # attribute is equal to the go module name, the name of the server will +# # be selected correctly, otherwise you can adjust it. +# # The file in /usr/local/bin/ with the name of the rockcraft will be +# # the binary to run your server. +# # You can also include here other binary files to be included in the rock. +# bin/otherbinary: usr/local/bin/projectname + +# go-framework/assets: +# stage: +# # By default, only the files in templates/ and static/ +# # are copied into the image. You can modify the list below to override +# # the default list and include or exclude specific files/directories +# # in your project. +# # Note: Prefix each entry with "go/" followed by the local path. 
+# - go/templates +# - go/static +# - go/otherdirectory +# - go/otherfile + + runtime-debs: + plugin: nil + stage-packages: + # Added manually for the migrations + - postgresql-client diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 993b2c8..7e8bd60 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -174,33 +174,20 @@ def _config(self) -> str: framework_environments = self._container.get_plan().to_dict()["services"][ self._workload_config.framework ]["environment"] - tracing_endpoint = None - tracing_service_name = None if framework_environments.get("OTEL_EXPORTER_OTLP_ENDPOINT", None): - tracing_endpoint = framework_environments["OTEL_EXPORTER_OTLP_ENDPOINT"] - tracing_service_name = framework_environments["OTEL_SERVICE_NAME"] - config += textwrap.dedent( - f"""\ + """\ from opentelemetry import trace - from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( + OTLPSpanExporter, + ) from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor def post_fork(server, worker): - resource = Resource.create( - attributes={{ - "service.name": "{tracing_service_name}", - "worker": worker.pid, - }} - ) trace.set_tracer_provider(TracerProvider(resource=resource)) - span_processor = BatchSpanProcessor( - OTLPSpanExporter( - endpoint="{tracing_endpoint}/v1/traces" - ) - ) + span_processor = BatchSpanProcessor(OTLPSpanExporter()) trace.get_tracer_provider().add_span_processor(span_processor) """ ) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 018080f..843b7fb 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -146,7 +146,7 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self.on[self._workload_config.container_name].pebble_ready, self._on_pebble_ready ) - def _init_redis(self, requires: dict[str, RelationMeta]) -> RedisRequires | None: + def _init_redis(self, requires: dict[str, RelationMeta]) -> "RedisRequires | None": """Initialize the Redis relation if its required. Args: @@ -161,7 +161,7 @@ def _init_redis(self, requires: dict[str, RelationMeta]) -> RedisRequires | None self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) return _redis - def _init_s3(self, requires: dict[str, RelationMeta]) -> S3Requirer | None: + def _init_s3(self, requires: dict[str, RelationMeta]) -> "S3Requirer | None": """Initialize the S3 relation if its required. Args: @@ -177,7 +177,7 @@ def _init_s3(self, requires: dict[str, RelationMeta]) -> S3Requirer | None: self.framework.observe(_s3.on.credentials_gone, self._on_s3_credential_gone) return _s3 - def _init_saml(self, requires: dict[str, RelationMeta]) -> SamlRequires | None: + def _init_saml(self, requires: dict[str, RelationMeta]) -> "SamlRequires | None": """Initialize the SAML relation if its required. Args: @@ -192,7 +192,7 @@ def _init_saml(self, requires: dict[str, RelationMeta]) -> SamlRequires | None: self.framework.observe(_saml.on.saml_data_available, self._on_saml_data_available) return _saml - def _init_rabbitmq(self, requires: dict[str, RelationMeta]) -> RabbitMQRequires | None: + def _init_rabbitmq(self, requires: dict[str, RelationMeta]) -> "RabbitMQRequires | None": """Initialize the RabbitMQ relation if its required. 
Args: @@ -215,7 +215,7 @@ def _init_rabbitmq(self, requires: dict[str, RelationMeta]) -> RabbitMQRequires return _rabbitmq - def _init_tracing(self, requires: dict[str, RelationMeta]) -> TracingEndpointRequirer | None: + def _init_tracing(self, requires: dict[str, RelationMeta]) -> "TracingEndpointRequirer | None": """Initialize the Tracing relation if its required. Args: @@ -454,7 +454,7 @@ def _create_charm_state(self) -> CharmState: tracing_relation_data = None if self._tracing and self._tracing.is_ready(): tracing_relation_data = TempoParameters( - endpoint=self._tracing.get_endpoint(protocol="otlp_http"), + endpoint=f'{self._tracing.get_endpoint(protocol="otlp_http")}', service_name=f"{self.framework.meta.name}-app", ) return CharmState.from_charm( From f8c68ce1d977529a56d9cfb8e4bbc5df153b2f84 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 8 Jan 2025 14:42:14 +0300 Subject: [PATCH 17/70] chore(examples): Fix naming issues with tracing apps examples. --- .../django_tracing_app}/__init__.py | 0 .../django_tracing_app}/asgi.py | 4 ++-- .../django_tracing_app}/settings.py | 6 +++--- .../django_tracing_app}/urls.py | 2 +- .../django_tracing_app}/wsgi.py | 4 ++-- .../{django_app => django_tracing_app}/manage.py | 2 +- .../{django_app => django_tracing_app}/migrate.sh | 0 .../{django_app => django_tracing_app}/testing/__init__.py | 0 .../{django_app => django_tracing_app}/testing/admin.py | 0 .../{django_app => django_tracing_app}/testing/apps.py | 0 .../testing/migrations/__init__.py | 0 .../{django_app => django_tracing_app}/testing/models.py | 0 .../{django_app => django_tracing_app}/testing/tests.py | 0 .../{django_app => django_tracing_app}/testing/views.py | 0 examples/go/go_tracing_app/rockcraft.yaml | 2 +- 15 files changed, 10 insertions(+), 10 deletions(-) rename examples/django/django_tracing_app/{django_app/django_app => django_tracing_app/django_tracing_app}/__init__.py (100%) rename examples/django/django_tracing_app/{django_app/django_app => django_tracing_app/django_tracing_app}/asgi.py (74%) rename examples/django/django_tracing_app/{django_app/django_app => django_tracing_app/django_tracing_app}/settings.py (96%) rename examples/django/django_tracing_app/{django_app/django_app => django_tracing_app/django_tracing_app}/urls.py (95%) rename examples/django/django_tracing_app/{django_app/django_app => django_tracing_app/django_tracing_app}/wsgi.py (74%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/manage.py (88%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/migrate.sh (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/__init__.py (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/admin.py (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/apps.py (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/migrations/__init__.py (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/models.py (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/tests.py (100%) rename examples/django/django_tracing_app/{django_app => django_tracing_app}/testing/views.py (100%) diff --git a/examples/django/django_tracing_app/django_app/django_app/__init__.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py similarity index 100% rename from 
examples/django/django_tracing_app/django_app/django_app/__init__.py rename to examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py diff --git a/examples/django/django_tracing_app/django_app/django_app/asgi.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py similarity index 74% rename from examples/django/django_tracing_app/django_app/django_app/asgi.py rename to examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py index 1616128..cc727ed 100644 --- a/examples/django/django_tracing_app/django_app/django_app/asgi.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py @@ -2,7 +2,7 @@ # See LICENSE file for licensing details. """ -ASGI config for django_app project. +ASGI config for django_tracing_app project. It exposes the ASGI callable as a module-level variable named ``application``. @@ -14,6 +14,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings") +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings") application = get_asgi_application() diff --git a/examples/django/django_tracing_app/django_app/django_app/settings.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py similarity index 96% rename from examples/django/django_tracing_app/django_app/django_app/settings.py rename to examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py index 22e0102..b2079a0 100644 --- a/examples/django/django_tracing_app/django_app/django_app/settings.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py @@ -2,7 +2,7 @@ # See LICENSE file for licensing details. """ -Django settings for django_app project. +Django settings for django_tracing_app project. Generated by 'django-admin startproject' using Django 5.0.2. @@ -53,7 +53,7 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = "django_app.urls" +ROOT_URLCONF = "django_tracing_app.urls" TEMPLATES = [ { @@ -71,7 +71,7 @@ }, ] -WSGI_APPLICATION = "django_app.wsgi.application" +WSGI_APPLICATION = "django_tracing_app.wsgi.application" # Database diff --git a/examples/django/django_tracing_app/django_app/django_app/urls.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py similarity index 95% rename from examples/django/django_tracing_app/django_app/django_app/urls.py rename to examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py index fc8c0b0..0a05d9f 100644 --- a/examples/django/django_tracing_app/django_app/django_app/urls.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py @@ -2,7 +2,7 @@ # See LICENSE file for licensing details. """ -URL configuration for django_app project. +URL configuration for django_tracing_app project. The `urlpatterns` list routes URLs to views. 
For more information please see: https://docs.djangoproject.com/en/5.0/topics/http/urls/ diff --git a/examples/django/django_tracing_app/django_app/django_app/wsgi.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py similarity index 74% rename from examples/django/django_tracing_app/django_app/django_app/wsgi.py rename to examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py index f422221..4c27f8f 100644 --- a/examples/django/django_tracing_app/django_app/django_app/wsgi.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py @@ -2,7 +2,7 @@ # See LICENSE file for licensing details. """ -WSGI config for django_app project. +WSGI config for django_tracing_app project. It exposes the WSGI callable as a module-level variable named ``application``. @@ -14,6 +14,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings") +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings") application = get_wsgi_application() diff --git a/examples/django/django_tracing_app/django_app/manage.py b/examples/django/django_tracing_app/django_tracing_app/manage.py similarity index 88% rename from examples/django/django_tracing_app/django_app/manage.py rename to examples/django/django_tracing_app/django_tracing_app/manage.py index cf94f1f..78d1aca 100755 --- a/examples/django/django_tracing_app/django_app/manage.py +++ b/examples/django/django_tracing_app/django_tracing_app/manage.py @@ -10,7 +10,7 @@ def main(): """Run administrative tasks.""" - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings") + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: diff --git a/examples/django/django_tracing_app/django_app/migrate.sh b/examples/django/django_tracing_app/django_tracing_app/migrate.sh similarity index 100% rename from examples/django/django_tracing_app/django_app/migrate.sh rename to examples/django/django_tracing_app/django_tracing_app/migrate.sh diff --git a/examples/django/django_tracing_app/django_app/testing/__init__.py b/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/__init__.py rename to examples/django/django_tracing_app/django_tracing_app/testing/__init__.py diff --git a/examples/django/django_tracing_app/django_app/testing/admin.py b/examples/django/django_tracing_app/django_tracing_app/testing/admin.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/admin.py rename to examples/django/django_tracing_app/django_tracing_app/testing/admin.py diff --git a/examples/django/django_tracing_app/django_app/testing/apps.py b/examples/django/django_tracing_app/django_tracing_app/testing/apps.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/apps.py rename to examples/django/django_tracing_app/django_tracing_app/testing/apps.py diff --git a/examples/django/django_tracing_app/django_app/testing/migrations/__init__.py b/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/migrations/__init__.py rename to examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py 
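
Note: the renames above move the Django tracing example from the django_app package to django_tracing_app, and every dotted reference is updated to match (DJANGO_SETTINGS_MODULE, ROOT_URLCONF, WSGI_APPLICATION). A quick, illustrative sanity check, not part of the patch set, that the renamed module tree still resolves could look like this when run from the renamed project root:

    # check_rename.py (hypothetical helper, illustrative only)
    import importlib
    import os

    # Same value the renamed manage.py / wsgi.py / asgi.py now set.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings")
    settings = importlib.import_module(os.environ["DJANGO_SETTINGS_MODULE"])

    # These attributes are rewritten by the patch; the check fails if a rename was missed.
    assert settings.ROOT_URLCONF == "django_tracing_app.urls"
    assert settings.WSGI_APPLICATION == "django_tracing_app.wsgi.application"
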
diff --git a/examples/django/django_tracing_app/django_app/testing/models.py b/examples/django/django_tracing_app/django_tracing_app/testing/models.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/models.py rename to examples/django/django_tracing_app/django_tracing_app/testing/models.py diff --git a/examples/django/django_tracing_app/django_app/testing/tests.py b/examples/django/django_tracing_app/django_tracing_app/testing/tests.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/tests.py rename to examples/django/django_tracing_app/django_tracing_app/testing/tests.py diff --git a/examples/django/django_tracing_app/django_app/testing/views.py b/examples/django/django_tracing_app/django_tracing_app/testing/views.py similarity index 100% rename from examples/django/django_tracing_app/django_app/testing/views.py rename to examples/django/django_tracing_app/django_tracing_app/testing/views.py diff --git a/examples/go/go_tracing_app/rockcraft.yaml b/examples/go/go_tracing_app/rockcraft.yaml index b9052c8..c4b8f29 100644 --- a/examples/go/go_tracing_app/rockcraft.yaml +++ b/examples/go/go_tracing_app/rockcraft.yaml @@ -19,7 +19,7 @@ platforms: # The platforms this rock should be built on and run on # the default binary. # see https://documentation.ubuntu.com/rockcraft/en/stable/reference/extensions/go-framework # for more information. -# +-- go_app +# +-- go_tracing_app # | |-- go.mod # | |-- migrate.sh From 6f77bb3c39d9cf8a4552fbe25b6003fcd9f70eb2 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 9 Jan 2025 06:24:40 +0300 Subject: [PATCH 18/70] chore(lint): Lint --- examples/fastapi/fastapi_tracing_app/app.py | 4 +--- src/paas_charm/_gunicorn/webserver.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/examples/fastapi/fastapi_tracing_app/app.py b/examples/fastapi/fastapi_tracing_app/app.py index 9871c12..996a9cb 100644 --- a/examples/fastapi/fastapi_tracing_app/app.py +++ b/examples/fastapi/fastapi_tracing_app/app.py @@ -17,9 +17,7 @@ app = FastAPI() set_tracer_provider(TracerProvider()) -get_tracer_provider().add_span_processor( - BatchSpanProcessor(OTLPSpanExporter()) -) +get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) get_tracer_provider().add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) FastAPIInstrumentor.instrument_app(app) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 7e8bd60..0e2d814 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -171,10 +171,17 @@ def _config(self) -> str: statsd_host = {repr(STATSD_HOST)} """ ) - framework_environments = self._container.get_plan().to_dict()["services"][ - self._workload_config.framework - ]["environment"] - if framework_environments.get("OTEL_EXPORTER_OTLP_ENDPOINT", None): + framework_environments = None + plan = self._container.get_plan().to_dict() + services = plan.get("services", None) + if services: + service_framework = services.get(self._workload_config.framework, None) + if service_framework: + framework_environments = service_framework.get("environment", None) + + if framework_environments and framework_environments.get( + "OTEL_EXPORTER_OTLP_ENDPOINT", None + ): config += textwrap.dedent( """\ from opentelemetry import trace From 678276e6897a3444fd3dc7f7d8f1e460a0609f5d Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 9 Jan 2025 08:39:19 +0300 Subject: [PATCH 19/70] 
chore(fix): Fix gunicorn fork tracer --- src/paas_charm/_gunicorn/webserver.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 0e2d814..92b5b2f 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -188,12 +188,11 @@ def _config(self) -> str: from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( OTLPSpanExporter, ) - from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor def post_fork(server, worker): - trace.set_tracer_provider(TracerProvider(resource=resource)) + trace.set_tracer_provider(TracerProvider()) span_processor = BatchSpanProcessor(OTLPSpanExporter()) trace.get_tracer_provider().add_span_processor(span_processor) """ From 484f1912fe5333048796fd04309f5baf02a855f4 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 9 Jan 2025 08:55:27 +0300 Subject: [PATCH 20/70] chore(test): Add tracing image options --- tests/integration/django/conftest.py | 9 +++++++++ tests/integration/fastapi/conftest.py | 9 +++++++++ tests/integration/flask/conftest.py | 9 +++++++++ tests/integration/go/conftest.py | 9 +++++++++ 4 files changed, 36 insertions(+) diff --git a/tests/integration/django/conftest.py b/tests/integration/django/conftest.py index 3f01742..bae372c 100644 --- a/tests/integration/django/conftest.py +++ b/tests/integration/django/conftest.py @@ -23,6 +23,15 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/django/charm") +@pytest.fixture(scope="module", name="django_tracing_app_image") +def fixture_django_tracing_app_image(pytestconfig: Config): + """Return the --django-tracing-app-image test parameter.""" + image = pytestconfig.getoption("--django-app-tracing-image") + if not image: + raise ValueError("the following arguments are required: --django-app-tracing-image") + return image + + @pytest.fixture(scope="module", name="django_app_image") def fixture_django_app_image(pytestconfig: Config): """Return the --django-app-image test parameter.""" diff --git a/tests/integration/fastapi/conftest.py b/tests/integration/fastapi/conftest.py index 3211232..a4ecac1 100644 --- a/tests/integration/fastapi/conftest.py +++ b/tests/integration/fastapi/conftest.py @@ -22,6 +22,15 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/fastapi/charm") +@pytest.fixture(scope="module", name="fastapi_tracing_app_image") +def fixture_fastapi_tracing_app_image(pytestconfig: Config): + """Return the --fastapi-tracing-app-image test parameter.""" + image = pytestconfig.getoption("--fastapi-tracing-app-image") + if not image: + raise ValueError("the following arguments are required: --fastapi-tracing-app-image") + return image + + @pytest.fixture(scope="module", name="fastapi_app_image") def fixture_fastapi_app_image(pytestconfig: Config): """Return the --fastapi-app-image test parameter.""" diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index 1e40a63..fac3350 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -26,6 +26,15 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/flask") +@pytest.fixture(scope="module", name="test_tracing_flask_image") +def fixture_test_tracing_flask_image(pytestconfig: Config): + """Return the --test-flask-tracing-image test parameter.""" + test_flask_image = pytestconfig.getoption("--test-tracing-flask-image") + if not 
test_flask_image: + raise ValueError("the following arguments are required: --test-tracing-flask-image") + return test_flask_image + + @pytest.fixture(scope="module", name="test_async_flask_image") def fixture_test_async_flask_image(pytestconfig: Config): """Return the --test-async-flask-image test parameter.""" diff --git a/tests/integration/go/conftest.py b/tests/integration/go/conftest.py index b7a31db..a78fb64 100644 --- a/tests/integration/go/conftest.py +++ b/tests/integration/go/conftest.py @@ -22,6 +22,15 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/go/charm") +@pytest.fixture(scope="module", name="go_app_tracing_image") +def fixture_go_tracing_app_image(pytestconfig: Config): + """Return the --go-app-image test parameter.""" + image = pytestconfig.getoption("--go-app-tracing-image") + if not image: + raise ValueError("the following arguments are required: --go-app-tracing-image") + return image + + @pytest.fixture(scope="module", name="go_app_image") def fixture_go_app_image(pytestconfig: Config): """Return the --go-app-image test parameter.""" From f547d3a1ea2d4a5952c29a13cf331564a274fb7c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 9 Jan 2025 11:38:29 +0300 Subject: [PATCH 21/70] chore(test): Fix test tracing image option name --- tests/integration/django/conftest.py | 4 ++-- tests/integration/go/conftest.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/integration/django/conftest.py b/tests/integration/django/conftest.py index bae372c..ec953ab 100644 --- a/tests/integration/django/conftest.py +++ b/tests/integration/django/conftest.py @@ -26,9 +26,9 @@ def cwd(): @pytest.fixture(scope="module", name="django_tracing_app_image") def fixture_django_tracing_app_image(pytestconfig: Config): """Return the --django-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--django-app-tracing-image") + image = pytestconfig.getoption("--django-tracing-app-image") if not image: - raise ValueError("the following arguments are required: --django-app-tracing-image") + raise ValueError("the following arguments are required: --django-tracing-app-image") return image diff --git a/tests/integration/go/conftest.py b/tests/integration/go/conftest.py index a78fb64..d28408d 100644 --- a/tests/integration/go/conftest.py +++ b/tests/integration/go/conftest.py @@ -22,12 +22,12 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/go/charm") -@pytest.fixture(scope="module", name="go_app_tracing_image") +@pytest.fixture(scope="module", name="go_tracing_app_image") def fixture_go_tracing_app_image(pytestconfig: Config): - """Return the --go-app-image test parameter.""" - image = pytestconfig.getoption("--go-app-tracing-image") + """Return the --go-tracing-app-image test parameter.""" + image = pytestconfig.getoption("--go-tracing-app-image") if not image: - raise ValueError("the following arguments are required: --go-app-tracing-image") + raise ValueError("the following arguments are required: --go-tracing-app-image") return image From d5b34a44fae23f85650697e3dffbb3b44e79a0d1 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 9 Jan 2025 12:20:46 +0300 Subject: [PATCH 22/70] chore(test): Fix options --- tests/conftest.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 974a0a0..d733b20 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,9 +9,13 @@ def pytest_addoption(parser): parser.addoption("--charm-file", action="extend", nargs="+", default=[]) 
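
Note: the option spellings fixed in PATCH 21 and registered in PATCH 22 have to agree in three places: the pytest_addoption() call, the getoption() lookup inside the fixture, and the flag passed on the command line. pytest's Config.getoption raises ValueError("no option named ...") when the flag was never registered, which is the failure mode the renames in PATCH 21 avoid. A minimal sketch of the pattern, using a hypothetical --example-tracing-image flag:

    # conftest.py sketch (hypothetical flag name, illustrative only)
    import pytest

    def pytest_addoption(parser):
        # Registers the flag; without this, getoption() below raises ValueError.
        parser.addoption("--example-tracing-image", action="store")

    @pytest.fixture(scope="module")
    def example_tracing_image(pytestconfig):
        image = pytestconfig.getoption("--example-tracing-image")
        if not image:
            raise ValueError("the following arguments are required: --example-tracing-image")
        return image
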
parser.addoption("--test-flask-image", action="store") parser.addoption("--test-async-flask-image", action="store") + parser.addoption("--test-tracing-flask-image", action="store") parser.addoption("--test-db-flask-image", action="store") parser.addoption("--django-app-image", action="store") parser.addoption("--django-async-app-image", action="store") + parser.addoption("--django-tracing-app-image", action="store") parser.addoption("--fastapi-app-image", action="store") + parser.addoption("--fastapi-tracing-app-image", action="store") parser.addoption("--go-app-image", action="store") + parser.addoption("--go-tracing-app-image", action="store") parser.addoption("--localstack-address", action="store") From 1a44207f303952f7841479491250b9edc0340c07 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 10 Jan 2025 06:43:33 +0300 Subject: [PATCH 23/70] chore(test): Add integration test for flask tracing --- tests/integration/flask/conftest.py | 86 +++++++++++++++++++ tests/integration/flask/requirements.txt | 1 + .../flask/test_workload_tracing.py | 58 +++++++++++++ tests/integration/helpers.py | 26 ++++++ 4 files changed, 171 insertions(+) create mode 100644 tests/integration/flask/test_workload_tracing.py diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index fac3350..a18f917 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -144,6 +144,92 @@ async def flask_async_app_fixture(build_charm: str, model: Model, test_async_fla return app +@pytest_asyncio.fixture(scope="module", name="flask_tracing_app") +async def flask_tracing_app_fixture(build_charm: str, model: Model, test_tracing_flask_image: str): + """Build and deploy the flask charm with test-tracing-flask image.""" + app_name = "flask-tracing-k8s" + + resources = { + "flask-app-image": test_tracing_flask_image, + } + app = await model.deploy( + build_charm, resources=resources, application_name=app_name, series="jammy" + ) + await model.wait_for_idle(raise_on_blocked=True) + return app + +async def deploy_and_configure_minio(ops_test: OpsTest) -> None: + """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" + config = { + "access-key": "accesskey", + "secret-key": "secretkey", + } + await ops_test.model.deploy("minio", channel="edge", trust=True, config=config) + await ops_test.model.wait_for_idle( + apps=["minio"], status="active", timeout=2000, idle_period=45 + ) + minio_addr = await unit_address(ops_test, "minio", 0) + + mc_client = Minio( + f"{minio_addr}:9000", + access_key="accesskey", + secret_key="secretkey", + secure=False, + ) + + # create tempo bucket + found = mc_client.bucket_exists("tempo") + if not found: + mc_client.make_bucket("tempo") + + # configure s3-integrator + s3_integrator_app: Application = ops_test.model.applications["s3-integrator"] + s3_integrator_leader: Unit = s3_integrator_app.units[0] + + await s3_integrator_app.set_config( + { + "endpoint": f"minio-0.minio-endpoints.{ops_test.model.name}.svc.cluster.local:9000", + "bucket": "tempo", + } + ) + + action = await s3_integrator_leader.run_action("sync-s3-credentials", **config) + action_result = await action.wait() + assert action_result.status == "completed" + +@pytest_asyncio.fixture(scope="module", name="tempo_app") +async def deploy_tempo_cluster(ops_test: OpsTest): + """Deploys tempo in its HA version together with minio and s3-integrator.""" + tempo_app = "tempo" + worker_app = "tempo-worker" + tempo_worker_charm_url, 
worker_channel = "tempo-worker-k8s", "edge" + tempo_coordinator_charm_url, coordinator_channel = "tempo-coordinator-k8s", "edge" + await ops_test.model.deploy( + tempo_worker_charm_url, application_name=worker_app, channel=worker_channel, trust=True + ) + app = await ops_test.model.deploy( + tempo_coordinator_charm_url, + application_name=tempo_app, + channel=coordinator_channel, + trust=True, + ) + await ops_test.model.deploy("s3-integrator", channel="edge") + + await ops_test.model.integrate(tempo_app + ":s3", "s3-integrator" + ":s3-credentials") + await ops_test.model.integrate(tempo_app + ":tempo-cluster", worker_app + ":tempo-cluster") + + await deploy_and_configure_minio(ops_test) + async with ops_test.fast_forward(): + await ops_test.model.wait_for_idle( + apps=[tempo_app, worker_app, "s3-integrator"], + status="active", + timeout=2000, + idle_period=30, + # TODO: remove when https://github.com/canonical/tempo-coordinator-k8s-operator/issues/90 is fixed + raise_on_error=False, + ) + return app + @pytest_asyncio.fixture(scope="module", name="traefik_app") async def deploy_traefik_fixture( model: Model, diff --git a/tests/integration/flask/requirements.txt b/tests/integration/flask/requirements.txt index 13ce280..099fdcd 100644 --- a/tests/integration/flask/requirements.txt +++ b/tests/integration/flask/requirements.txt @@ -1,3 +1,4 @@ ops >= 1.5.0 pytest-operator >= 0.32.0 aiohttp == 3.11.7 +tenacity diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py new file mode 100644 index 0000000..99eef3d --- /dev/null +++ b/tests/integration/flask/test_workload_tracing.py @@ -0,0 +1,58 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Integration tests for Flask workers and schedulers.""" + +import asyncio +import logging +import time + +import aiohttp +import pytest +from juju.application import Application +from juju.model import Model +from pytest_operator.plugin import OpsTest + +logger = logging.getLogger(__name__) + + +@pytest.mark.usefixtures("flask_tracing_app") +@pytest.mark.usefixtures("tempo_app") +async def test_workload_tracing( + ops_test: OpsTest, + model: Model, + flask_tracing_app: Application, + tempo_app: Application, + get_unit_ips, +): + """ + arrange: Flask is deployed with async enabled rock. Change gunicorn worker class. + act: Do 15 requests that would take 2 seconds each. + assert: All 15 requests should be served in under 3 seconds. + """ + await ops_test.model.integrate(f"{flask_tracing_app.name}:tracing", f"{tempo_app.name}:tracing") + + await ops_test.model.wait_for_idle( + apps=[flask_tracing_app.name, tempo_app.name], status="active", timeout=300 + ) + # the flask unit is not important. 
Take the first one + flask_unit_ip = (await get_unit_ips(flask_tracing_app.name))[0] + + async def _fetch_page(session): + params = {"duration": 2} + async with session.get(f"http://{flask_unit_ip}:8000", params=params) as response: + return await response.text() + + async with aiohttp.ClientSession() as session: + page = _fetch_page(session) + await asyncio.gather([page]) + + print("--------------------------") + print(f"{flask_tracing_app.name}-app") + print("--------------------------") + # verify workload traces are ingested into Tempo + assert await get_traces_patiently( + await get_application_ip(ops_test, tempo_app.name), + service_name=f"{flask_tracing_app.name}-app", + tls=False, + ) diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 9c912fb..23ac7b8 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -6,6 +6,8 @@ import pathlib import uuid import zipfile +import requests +from tenacity import retry, stop_after_attempt, wait_exponential import yaml @@ -44,3 +46,27 @@ def inject_charm_config(charm: pathlib.Path | str, config: dict, tmp_dir: pathli with open(charm, "wb") as new_charm_file: new_charm_file.write(new_charm.getvalue()) return str(charm) + + +def get_traces(tempo_host: str, service_name: str, tls=False): + """Get traces directly from Tempo REST API.""" + url = f"{'https' if tls else 'http'}://{tempo_host}:3200/api/search?tags=service.name={service_name}" + req = requests.get( + url, + verify=False, + ) + assert req.status_code == 200 + traces = json.loads(req.text)["traces"] + return traces + + +@retry(stop=stop_after_attempt(15), wait=wait_exponential(multiplier=1, min=4, max=10)) +async def get_traces_patiently(tempo_host, service_name="tracegen-otlp_http", tls=False): + """Get traces directly from Tempo REST API, but also try multiple times. + + Useful for cases when Tempo might not return the traces immediately (its API is known for returning data in + random order). 
+ """ + traces = get_traces(tempo_host, service_name=service_name, tls=tls) + assert len(traces) > 0 + return traces From cccd115eadb13a20c84b58598ce03a8e4b181aac Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 10 Jan 2025 06:46:53 +0300 Subject: [PATCH 24/70] chore: Format --- tests/integration/flask/conftest.py | 3 +++ tests/integration/flask/test_workload_tracing.py | 4 +++- tests/integration/helpers.py | 4 ++-- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index a18f917..bc65076 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -158,6 +158,7 @@ async def flask_tracing_app_fixture(build_charm: str, model: Model, test_tracing await model.wait_for_idle(raise_on_blocked=True) return app + async def deploy_and_configure_minio(ops_test: OpsTest) -> None: """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" config = { @@ -197,6 +198,7 @@ async def deploy_and_configure_minio(ops_test: OpsTest) -> None: action_result = await action.wait() assert action_result.status == "completed" + @pytest_asyncio.fixture(scope="module", name="tempo_app") async def deploy_tempo_cluster(ops_test: OpsTest): """Deploys tempo in its HA version together with minio and s3-integrator.""" @@ -230,6 +232,7 @@ async def deploy_tempo_cluster(ops_test: OpsTest): ) return app + @pytest_asyncio.fixture(scope="module", name="traefik_app") async def deploy_traefik_fixture( model: Model, diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 99eef3d..06a4c7d 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -30,7 +30,9 @@ async def test_workload_tracing( act: Do 15 requests that would take 2 seconds each. assert: All 15 requests should be served in under 3 seconds. 
""" - await ops_test.model.integrate(f"{flask_tracing_app.name}:tracing", f"{tempo_app.name}:tracing") + await ops_test.model.integrate( + f"{flask_tracing_app.name}:tracing", f"{tempo_app.name}:tracing" + ) await ops_test.model.wait_for_idle( apps=[flask_tracing_app.name, tempo_app.name], status="active", timeout=300 diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 23ac7b8..e772f6a 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -6,10 +6,10 @@ import pathlib import uuid import zipfile -import requests -from tenacity import retry, stop_after_attempt, wait_exponential +import requests import yaml +from tenacity import retry, stop_after_attempt, wait_exponential def inject_venv(charm: pathlib.Path | str, src: pathlib.Path | str): From 3946636d5535f7397b5eb0ad93cf8289dc31dc66 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 10 Jan 2025 08:45:08 +0300 Subject: [PATCH 25/70] chore(test): Add tracing test to workflow --- .github/workflows/integration_test.yaml | 2 +- src/paas_charm/charm.py | 48 ++++++++++++++++++------- 2 files changed, 36 insertions(+), 14 deletions(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index a35f9f2..07be6c6 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -16,7 +16,7 @@ jobs: charmcraft-ref: flask-async-worker pre-run-script: localstack-installation.sh # charmcraft-channel: latest/edge - modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py"]' + modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_workload_tracing.py"]' rockcraft-channel: latest/edge juju-channel: ${{ matrix.juju-version }} channel: 1.29-strict/stable diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 843b7fb..5c53363 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -157,8 +157,14 @@ def _init_redis(self, requires: dict[str, RelationMeta]) -> "RedisRequires | Non """ _redis = None if "redis" in requires and requires["redis"].interface_name == "redis": - _redis = RedisRequires(charm=self, relation_name="redis") - self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) + try: + _redis = RedisRequires(charm=self, relation_name="redis") + self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) + except NameError: + logger.exception( + "Missing charm library, please run `charmcraft fetch-lib charms.redis_k8s.v0.redis`" + ) + return _redis def _init_s3(self, requires: dict[str, RelationMeta]) -> "S3Requirer | None": @@ -172,9 +178,14 @@ def _init_s3(self, requires: dict[str, RelationMeta]) -> "S3Requirer | None": """ _s3 = None if "s3" in requires and requires["s3"].interface_name == "s3": - _s3 = S3Requirer(charm=self, relation_name="s3", bucket_name=self.app.name) - self.framework.observe(_s3.on.credentials_changed, self._on_s3_credential_changed) - self.framework.observe(_s3.on.credentials_gone, self._on_s3_credential_gone) + try: + _s3 = S3Requirer(charm=self, relation_name="s3", bucket_name=self.app.name) + self.framework.observe(_s3.on.credentials_changed, 
self._on_s3_credential_changed) + self.framework.observe(_s3.on.credentials_gone, self._on_s3_credential_gone) + except NameError: + logger.exception( + "Missing charm library, please run `charmcraft fetch-lib charms.data_platform_libs.v0.s3`" + ) return _s3 def _init_saml(self, requires: dict[str, RelationMeta]) -> "SamlRequires | None": @@ -188,8 +199,13 @@ def _init_saml(self, requires: dict[str, RelationMeta]) -> "SamlRequires | None" """ _saml = None if "saml" in requires and requires["saml"].interface_name == "saml": - _saml = SamlRequires(self) - self.framework.observe(_saml.on.saml_data_available, self._on_saml_data_available) + try: + _saml = SamlRequires(self) + self.framework.observe(_saml.on.saml_data_available, self._on_saml_data_available) + except NameError: + logger.exception( + "Missing charm library, please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`" + ) return _saml def _init_rabbitmq(self, requires: dict[str, RelationMeta]) -> "RabbitMQRequires | None": @@ -226,11 +242,17 @@ def _init_tracing(self, requires: dict[str, RelationMeta]) -> "TracingEndpointRe """ _tracing = None if "tracing" in requires and requires["tracing"].interface_name == "tracing": - _tracing = TracingEndpointRequirer( - self, relation_name="tracing", protocols=["otlp_http"] - ) - self.framework.observe(_tracing.on.endpoint_changed, self._on_tracing_relation_changed) - self.framework.observe(_tracing.on.endpoint_removed, self._on_tracing_relation_broken) + try: + _tracing = TracingEndpointRequirer( + self, relation_name="tracing", protocols=["otlp_http"] + ) + self.framework.observe(_tracing.on.endpoint_changed, self._on_tracing_relation_changed) + self.framework.observe(_tracing.on.endpoint_removed, self._on_tracing_relation_broken) + except NameError as e: + logger.exception( + "Missing charm library, please run " + "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" + ) return _tracing def get_framework_config(self) -> BaseModel: @@ -455,7 +477,7 @@ def _create_charm_state(self) -> CharmState: if self._tracing and self._tracing.is_ready(): tracing_relation_data = TempoParameters( endpoint=f'{self._tracing.get_endpoint(protocol="otlp_http")}', - service_name=f"{self.framework.meta.name}-app", + service_name=self.framework.meta.name, ) return CharmState.from_charm( config=config, From d53b8a49e71af3298a79afe2a7e6351463edc66e Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 10 Jan 2025 08:46:58 +0300 Subject: [PATCH 26/70] chore(): Licence update --- docs/.readthedocs.yaml | 2 +- docs/.sphinx/_static/css/pdf.css | 4 ++-- docs/.sphinx/_templates/header.html | 4 ++-- docs/.sphinx/get_vale_conf.py | 2 +- docs/.sphinx/spellingcheck.yaml | 2 +- docs/Makefile | 2 +- docs/conf.py | 2 +- docs/doc-cheat-sheet.rst | 2 +- docs/explanation/index.rst | 3 +-- docs/how-to/contributing.rst | 2 +- docs/how-to/index.rst | 2 +- docs/index.rst | 6 +++--- docs/tutorial/index.rst | 4 +--- examples/django/charm/charmcraft.yaml | 2 +- .../charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py | 2 +- .../django/charm/lib/charms/traefik_k8s/v2/ingress.py | 2 +- examples/django/charm/src/charm.py | 2 +- .../django/django_app/django_app/django_app/__init__.py | 2 +- examples/django/django_app/django_app/django_app/asgi.py | 2 +- .../django/django_app/django_app/django_app/settings.py | 2 +- examples/django/django_app/django_app/django_app/urls.py | 2 +- examples/django/django_app/django_app/django_app/wsgi.py | 2 +- examples/django/django_app/django_app/manage.py | 2 +- 
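
Note on the NameError handling introduced in PATCH 25 above: catching NameError around the requirer construction only works because the corresponding charm-library imports are themselves optional, so a library that was never fetched simply leaves the class name undefined. A sketch of the assumed module-level guard (not the verbatim source):

    # Assumed import guard near the top of src/paas_charm/charm.py (illustrative sketch).
    try:
        from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer
    except ImportError:
        # Library not fetched with `charmcraft fetch-lib`; the name stays undefined and
        # _init_tracing() later catches the resulting NameError and logs the fetch-lib hint.
        pass
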
examples/django/django_app/django_app/migrate.sh | 2 +- examples/django/django_app/django_app/testing/__init__.py | 2 +- examples/django/django_app/django_app/testing/admin.py | 2 +- examples/django/django_app/django_app/testing/apps.py | 2 +- .../django_app/django_app/testing/migrations/__init__.py | 2 +- examples/django/django_app/django_app/testing/models.py | 2 +- examples/django/django_app/django_app/testing/tests.py | 2 +- examples/django/django_app/django_app/testing/views.py | 2 +- examples/django/django_app/rockcraft.yaml | 2 +- .../django_async_app/django_async_app/__init__.py | 2 +- .../django_async_app/django_async_app/asgi.py | 2 +- .../django_async_app/django_async_app/settings.py | 2 +- .../django_async_app/django_async_app/urls.py | 2 +- .../django_async_app/django_async_app/wsgi.py | 2 +- .../django/django_async_app/django_async_app/manage.py | 2 +- .../django_async_app/django_async_app/testing/__init__.py | 2 +- .../django_async_app/django_async_app/testing/admin.py | 2 +- .../django_async_app/django_async_app/testing/apps.py | 2 +- .../django_async_app/testing/migrations/__init__.py | 2 +- .../django_async_app/django_async_app/testing/models.py | 2 +- .../django_async_app/django_async_app/testing/tests.py | 2 +- .../django_async_app/django_async_app/testing/views.py | 2 +- examples/django/django_async_app/rockcraft.yaml | 2 +- .../django_tracing_app/django_tracing_app/__init__.py | 2 +- .../django_tracing_app/django_tracing_app/asgi.py | 2 +- .../django_tracing_app/django_tracing_app/settings.py | 2 +- .../django_tracing_app/django_tracing_app/urls.py | 2 +- .../django_tracing_app/django_tracing_app/wsgi.py | 2 +- .../django_tracing_app/django_tracing_app/manage.py | 2 +- .../django_tracing_app/django_tracing_app/migrate.sh | 2 +- .../django_tracing_app/testing/__init__.py | 2 +- .../django_tracing_app/testing/admin.py | 2 +- .../django_tracing_app/django_tracing_app/testing/apps.py | 2 +- .../django_tracing_app/testing/migrations/__init__.py | 2 +- .../django_tracing_app/testing/models.py | 2 +- .../django_tracing_app/testing/tests.py | 2 +- .../django_tracing_app/testing/views.py | 2 +- examples/django/django_tracing_app/rockcraft.yaml | 2 +- examples/fastapi/charm/charmcraft.yaml | 8 ++++---- .../fastapi/charm/lib/charms/saml_integrator/v0/saml.py | 2 +- .../charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py | 2 +- .../fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py | 2 +- examples/fastapi/charm/src/charm.py | 2 +- examples/fastapi/fastapi_app/alembic.ini | 2 +- examples/fastapi/fastapi_app/alembic/env.py | 2 +- .../alembic/versions/eca6177bd16a_initial_migration.py | 4 ++-- examples/fastapi/fastapi_app/app.py | 2 +- examples/fastapi/fastapi_app/migrate.sh | 2 +- examples/fastapi/fastapi_app/rockcraft.yaml | 2 +- examples/fastapi/fastapi_tracing_app/alembic.ini | 2 +- examples/fastapi/fastapi_tracing_app/alembic/env.py | 2 +- .../alembic/versions/eca6177bd16a_initial_migration.py | 4 ++-- examples/fastapi/fastapi_tracing_app/app.py | 2 +- examples/fastapi/fastapi_tracing_app/migrate.sh | 2 +- examples/fastapi/fastapi_tracing_app/rockcraft.yaml | 2 +- examples/flask/charmcraft.yaml | 2 +- examples/flask/lib/charms/saml_integrator/v0/saml.py | 2 +- .../flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py | 2 +- examples/flask/lib/charms/traefik_k8s/v2/ingress.py | 2 +- examples/flask/src/charm.py | 2 +- examples/flask/test_async_rock/app.py | 2 +- examples/flask/test_async_rock/rockcraft.yaml | 2 +- examples/flask/test_db_rock/alembic.ini | 2 +- 
examples/flask/test_db_rock/alembic/env.py | 2 +- .../alembic/versions/eca6177bd16a_initial_migration.py | 4 ++-- examples/flask/test_db_rock/app.py | 2 +- examples/flask/test_db_rock/migrate.sh | 2 +- examples/flask/test_db_rock/rockcraft.yaml | 2 +- examples/flask/test_rock/app.py | 2 +- examples/flask/test_rock/rockcraft.yaml | 2 +- examples/flask/test_tracing_rock/app.py | 2 +- examples/flask/test_tracing_rock/rockcraft.yaml | 2 +- examples/go/charm/charmcraft.yaml | 8 ++++---- examples/go/charm/lib/charms/saml_integrator/v0/saml.py | 2 +- .../charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py | 2 +- examples/go/charm/lib/charms/traefik_k8s/v2/ingress.py | 2 +- examples/go/charm/src/charm.py | 2 +- examples/go/go_app/go.mod | 2 +- examples/go/go_app/internal/service/service.go | 2 +- examples/go/go_app/main.go | 2 +- examples/go/go_app/migrate.sh | 2 +- examples/go/go_app/rockcraft.yaml | 2 +- examples/go/go_tracing_app/go.mod | 2 +- examples/go/go_tracing_app/internal/service/service.go | 2 +- examples/go/go_tracing_app/main.go | 2 +- examples/go/go_tracing_app/migrate.sh | 2 +- examples/go/go_tracing_app/rockcraft.yaml | 2 +- localstack-installation.sh | 2 +- pyproject.toml | 2 +- setup.py | 2 +- src/paas_app_charmer/__init__.py | 2 +- src/paas_app_charmer/django/__init__.py | 2 +- src/paas_app_charmer/django/charm.py | 2 +- src/paas_app_charmer/fastapi/__init__.py | 2 +- src/paas_app_charmer/fastapi/charm.py | 2 +- src/paas_app_charmer/flask/__init__.py | 2 +- src/paas_app_charmer/flask/charm.py | 2 +- src/paas_app_charmer/go/__init__.py | 2 +- src/paas_app_charmer/go/charm.py | 2 +- src/paas_charm/__init__.py | 2 +- src/paas_charm/_gunicorn/__init__.py | 2 +- src/paas_charm/_gunicorn/charm.py | 2 +- src/paas_charm/_gunicorn/webserver.py | 2 +- src/paas_charm/_gunicorn/workload_config.py | 2 +- src/paas_charm/_gunicorn/wsgi_app.py | 2 +- src/paas_charm/app.py | 2 +- src/paas_charm/charm.py | 2 +- src/paas_charm/charm_state.py | 2 +- src/paas_charm/charm_utils.py | 2 +- src/paas_charm/database_migration.py | 2 +- src/paas_charm/databases.py | 2 +- src/paas_charm/django/__init__.py | 2 +- src/paas_charm/django/charm.py | 2 +- src/paas_charm/exceptions.py | 2 +- src/paas_charm/fastapi/__init__.py | 2 +- src/paas_charm/fastapi/charm.py | 2 +- src/paas_charm/flask/__init__.py | 2 +- src/paas_charm/flask/charm.py | 2 +- src/paas_charm/framework.py | 2 +- src/paas_charm/go/__init__.py | 2 +- src/paas_charm/go/charm.py | 2 +- src/paas_charm/observability.py | 2 +- src/paas_charm/rabbitmq.py | 2 +- src/paas_charm/secret_storage.py | 2 +- src/paas_charm/utils.py | 2 +- tests/__init__.py | 2 +- tests/conftest.py | 2 +- tests/integration/conftest.py | 2 +- tests/integration/django/__init__.py | 2 +- tests/integration/django/conftest.py | 2 +- tests/integration/django/test_django.py | 2 +- tests/integration/django/test_django_integrations.py | 2 +- tests/integration/django/test_workers.py | 2 +- tests/integration/fastapi/__init__.py | 2 +- tests/integration/fastapi/conftest.py | 2 +- tests/integration/fastapi/test_fastapi.py | 2 +- tests/integration/flask/__init__.py | 2 +- tests/integration/flask/conftest.py | 2 +- tests/integration/flask/test_charm.py | 2 +- tests/integration/flask/test_cos.py | 2 +- tests/integration/flask/test_database.py | 2 +- tests/integration/flask/test_db_migration.py | 2 +- tests/integration/flask/test_integrations.py | 2 +- tests/integration/flask/test_proxy.py | 2 +- tests/integration/flask/test_workers.py | 2 +- tests/integration/flask/test_workload_tracing.py | 2 
+- tests/integration/go/__init__.py | 2 +- tests/integration/go/conftest.py | 2 +- tests/integration/go/test_go.py | 2 +- tests/integration/helpers.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/django/__init__.py | 2 +- tests/unit/django/conftest.py | 2 +- tests/unit/django/constants.py | 2 +- tests/unit/django/test_charm.py | 2 +- tests/unit/django/test_workers.py | 2 +- tests/unit/fastapi/__init__.py | 2 +- tests/unit/fastapi/conftest.py | 2 +- tests/unit/fastapi/constants.py | 2 +- tests/unit/fastapi/test_charm.py | 2 +- tests/unit/flask/__init__.py | 2 +- tests/unit/flask/conftest.py | 2 +- tests/unit/flask/constants.py | 2 +- tests/unit/flask/test_charm.py | 2 +- tests/unit/flask/test_charm_state.py | 2 +- tests/unit/flask/test_database_migration.py | 2 +- tests/unit/flask/test_databases.py | 2 +- tests/unit/flask/test_flask_app.py | 2 +- tests/unit/flask/test_tracing.py | 2 +- tests/unit/flask/test_webserver.py | 2 +- tests/unit/flask/test_workers.py | 2 +- tests/unit/go/__init__.py | 2 +- tests/unit/go/conftest.py | 2 +- tests/unit/go/constants.py | 2 +- tests/unit/go/test_app.py | 2 +- tests/unit/go/test_charm.py | 2 +- tests/unit/test_deprecated.py | 2 +- tox.ini | 2 +- 201 files changed, 214 insertions(+), 217 deletions(-) diff --git a/docs/.readthedocs.yaml b/docs/.readthedocs.yaml index b5ca6d4..1f6c666 100644 --- a/docs/.readthedocs.yaml +++ b/docs/.readthedocs.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. # .readthedocs.yaml diff --git a/docs/.sphinx/_static/css/pdf.css b/docs/.sphinx/_static/css/pdf.css index a7a210d..14b3c26 100644 --- a/docs/.sphinx/_static/css/pdf.css +++ b/docs/.sphinx/_static/css/pdf.css @@ -1,5 +1,5 @@ /* - * Copyright 2024 Canonical Ltd. + * Copyright 2025 Canonical Ltd. * See LICENSE file for licensing details. */ @@ -17,4 +17,4 @@ margin-left: -0.5rem; padding-left: .5rem; padding-right: .5rem; -} \ No newline at end of file +} diff --git a/docs/.sphinx/_templates/header.html b/docs/.sphinx/_templates/header.html index c937139..b061863 100644 --- a/docs/.sphinx/_templates/header.html +++ b/docs/.sphinx/_templates/header.html @@ -1,5 +1,5 @@ @@ -54,4 +54,4 @@ - \ No newline at end of file + diff --git a/docs/.sphinx/get_vale_conf.py b/docs/.sphinx/get_vale_conf.py index e7a11e8..2c07348 100644 --- a/docs/.sphinx/get_vale_conf.py +++ b/docs/.sphinx/get_vale_conf.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. #! /usr/bin/env python diff --git a/docs/.sphinx/spellingcheck.yaml b/docs/.sphinx/spellingcheck.yaml index f35996b..dcf26be 100644 --- a/docs/.sphinx/spellingcheck.yaml +++ b/docs/.sphinx/spellingcheck.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. matrix: diff --git a/docs/Makefile b/docs/Makefile index 76f2e46..89d6a3a 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. # This Makefile stub allows you to customize starter pack (SP) targets. diff --git a/docs/conf.py b/docs/conf.py index 95b53c1..e64a255 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
import datetime diff --git a/docs/doc-cheat-sheet.rst b/docs/doc-cheat-sheet.rst index b3bf401..02e4b09 100644 --- a/docs/doc-cheat-sheet.rst +++ b/docs/doc-cheat-sheet.rst @@ -1,4 +1,4 @@ -.. Copyright 2024 Canonical Ltd. +.. Copyright 2025 Canonical Ltd. .. See LICENSE file for licensing details. :orphan: diff --git a/docs/explanation/index.rst b/docs/explanation/index.rst index d7d9bf3..f64fc98 100644 --- a/docs/explanation/index.rst +++ b/docs/explanation/index.rst @@ -1,4 +1,4 @@ -.. Copyright 2024 Canonical Ltd. +.. Copyright 2025 Canonical Ltd. .. See LICENSE file for licensing details. Explanation @@ -10,4 +10,3 @@ on key topics related to the use and configuration of .. toctree:: :maxdepth: 1 - diff --git a/docs/how-to/contributing.rst b/docs/how-to/contributing.rst index 7f2b7e2..d124f1c 100644 --- a/docs/how-to/contributing.rst +++ b/docs/how-to/contributing.rst @@ -1,4 +1,4 @@ -.. Copyright 2024 Canonical Ltd. +.. Copyright 2025 Canonical Ltd. .. See LICENSE file for licensing details. .. TODO: Replace all mentions of ACME with your project name diff --git a/docs/how-to/index.rst b/docs/how-to/index.rst index c7d0b4d..cbeba08 100644 --- a/docs/how-to/index.rst +++ b/docs/how-to/index.rst @@ -1,4 +1,4 @@ -.. Copyright 2024 Canonical Ltd. +.. Copyright 2025 Canonical Ltd. .. See LICENSE file for licensing details. How-to guides diff --git a/docs/index.rst b/docs/index.rst index 5a0abac..dac722c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,4 +1,4 @@ -.. Copyright 2024 Canonical Ltd. +.. Copyright 2025 Canonical Ltd. .. See LICENSE file for licensing details. 12-Factor app support in Charmcraft and Rockcraft @@ -8,12 +8,12 @@ the content you need to generate rocks and charms for 12-Factor applications.** **A paragraph of one to three short sentences, that describe what the product -does.** +does.** **A third paragraph of similar length, this time explaining what need the product meets.** -**Finally, a paragraph that describes whom the product is useful for.** +**Finally, a paragraph that describes whom the product is useful for.** --------- diff --git a/docs/tutorial/index.rst b/docs/tutorial/index.rst index 6f18154..a5b5234 100644 --- a/docs/tutorial/index.rst +++ b/docs/tutorial/index.rst @@ -1,4 +1,4 @@ -.. Copyright 2024 Canonical Ltd. +.. Copyright 2025 Canonical Ltd. .. See LICENSE file for licensing details. Tutorials @@ -8,5 +8,3 @@ Developer tutorials for the 12-factor app project. .. toctree:: :maxdepth: 1 - - diff --git a/examples/django/charm/charmcraft.yaml b/examples/django/charm/charmcraft.yaml index b6884c3..966e487 100644 --- a/examples/django/charm/charmcraft.yaml +++ b/examples/django/charm/charmcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: django-k8s diff --git a/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 1dd78b5..e64c468 100644 --- a/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/examples/django/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """## Overview. 
diff --git a/examples/django/charm/lib/charms/traefik_k8s/v2/ingress.py b/examples/django/charm/lib/charms/traefik_k8s/v2/ingress.py index 407cfb5..582a31f 100644 --- a/examples/django/charm/lib/charms/traefik_k8s/v2/ingress.py +++ b/examples/django/charm/lib/charms/traefik_k8s/v2/ingress.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. r"""# Interface Library for ingress. diff --git a/examples/django/charm/src/charm.py b/examples/django/charm/src/charm.py index 5199852..f0067dc 100755 --- a/examples/django/charm/src/charm.py +++ b/examples/django/charm/src/charm.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Django Charm service.""" diff --git a/examples/django/django_app/django_app/django_app/__init__.py b/examples/django/django_app/django_app/django_app/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_app/django_app/django_app/__init__.py +++ b/examples/django/django_app/django_app/django_app/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_app/django_app/django_app/asgi.py b/examples/django/django_app/django_app/django_app/asgi.py index 1616128..adc322c 100644 --- a/examples/django/django_app/django_app/django_app/asgi.py +++ b/examples/django/django_app/django_app/django_app/asgi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_app/django_app/django_app/settings.py b/examples/django/django_app/django_app/django_app/settings.py index 22e0102..c8e025c 100644 --- a/examples/django/django_app/django_app/django_app/settings.py +++ b/examples/django/django_app/django_app/django_app/settings.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_app/django_app/django_app/urls.py b/examples/django/django_app/django_app/django_app/urls.py index 916b144..f43a34e 100644 --- a/examples/django/django_app/django_app/django_app/urls.py +++ b/examples/django/django_app/django_app/django_app/urls.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_app/django_app/django_app/wsgi.py b/examples/django/django_app/django_app/django_app/wsgi.py index f422221..bfeb97b 100644 --- a/examples/django/django_app/django_app/django_app/wsgi.py +++ b/examples/django/django_app/django_app/django_app/wsgi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_app/django_app/manage.py b/examples/django/django_app/django_app/manage.py index cf94f1f..0f66a2d 100755 --- a/examples/django/django_app/django_app/manage.py +++ b/examples/django/django_app/django_app/manage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Django's command-line utility for administrative tasks.""" diff --git a/examples/django/django_app/django_app/migrate.sh b/examples/django/django_app/django_app/migrate.sh index ce3a73c..6aaeec6 100644 --- a/examples/django/django_app/django_app/migrate.sh +++ b/examples/django/django_app/django_app/migrate.sh @@ -1,5 +1,5 @@ #!/usr/bin/bash -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. python3 manage.py migrate diff --git a/examples/django/django_app/django_app/testing/__init__.py b/examples/django/django_app/django_app/testing/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_app/django_app/testing/__init__.py +++ b/examples/django/django_app/django_app/testing/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_app/django_app/testing/admin.py b/examples/django/django_app/django_app/testing/admin.py index b111777..56e199a 100644 --- a/examples/django/django_app/django_app/testing/admin.py +++ b/examples/django/django_app/django_app/testing/admin.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.contrib import admin diff --git a/examples/django/django_app/django_app/testing/apps.py b/examples/django/django_app/django_app/testing/apps.py index f435e0c..6eb0613 100644 --- a/examples/django/django_app/django_app/testing/apps.py +++ b/examples/django/django_app/django_app/testing/apps.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.apps import AppConfig diff --git a/examples/django/django_app/django_app/testing/migrations/__init__.py b/examples/django/django_app/django_app/testing/migrations/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_app/django_app/testing/migrations/__init__.py +++ b/examples/django/django_app/django_app/testing/migrations/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_app/django_app/testing/models.py b/examples/django/django_app/django_app/testing/models.py index dde0a81..d709735 100644 --- a/examples/django/django_app/django_app/testing/models.py +++ b/examples/django/django_app/django_app/testing/models.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.db import models diff --git a/examples/django/django_app/django_app/testing/tests.py b/examples/django/django_app/django_app/testing/tests.py index 922bda5..b62faeb 100644 --- a/examples/django/django_app/django_app/testing/tests.py +++ b/examples/django/django_app/django_app/testing/tests.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.test import TestCase diff --git a/examples/django/django_app/django_app/testing/views.py b/examples/django/django_app/django_app/testing/views.py index 07b39f7..9001f17 100644 --- a/examples/django/django_app/django_app/testing/views.py +++ b/examples/django/django_app/django_app/testing/views.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
import os diff --git a/examples/django/django_app/rockcraft.yaml b/examples/django/django_app/rockcraft.yaml index 5405246..5084b09 100644 --- a/examples/django/django_app/rockcraft.yaml +++ b/examples/django/django_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: django-app diff --git a/examples/django/django_async_app/django_async_app/django_async_app/__init__.py b/examples/django/django_async_app/django_async_app/django_async_app/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_async_app/django_async_app/django_async_app/__init__.py +++ b/examples/django/django_async_app/django_async_app/django_async_app/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_async_app/django_async_app/django_async_app/asgi.py b/examples/django/django_async_app/django_async_app/django_async_app/asgi.py index 76f2304..83dbc7c 100644 --- a/examples/django/django_async_app/django_async_app/django_async_app/asgi.py +++ b/examples/django/django_async_app/django_async_app/django_async_app/asgi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_async_app/django_async_app/django_async_app/settings.py b/examples/django/django_async_app/django_async_app/django_async_app/settings.py index 614e67f..35f7911 100644 --- a/examples/django/django_async_app/django_async_app/django_async_app/settings.py +++ b/examples/django/django_async_app/django_async_app/django_async_app/settings.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_async_app/django_async_app/django_async_app/urls.py b/examples/django/django_async_app/django_async_app/django_async_app/urls.py index ba482f2..818d142 100644 --- a/examples/django/django_async_app/django_async_app/django_async_app/urls.py +++ b/examples/django/django_async_app/django_async_app/django_async_app/urls.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_async_app/django_async_app/django_async_app/wsgi.py b/examples/django/django_async_app/django_async_app/django_async_app/wsgi.py index 968f6f5..5bdcf5c 100644 --- a/examples/django/django_async_app/django_async_app/django_async_app/wsgi.py +++ b/examples/django/django_async_app/django_async_app/django_async_app/wsgi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_async_app/django_async_app/manage.py b/examples/django/django_async_app/django_async_app/manage.py index 6c134b1..cff8583 100755 --- a/examples/django/django_async_app/django_async_app/manage.py +++ b/examples/django/django_async_app/django_async_app/manage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Django's command-line utility for administrative tasks.""" diff --git a/examples/django/django_async_app/django_async_app/testing/__init__.py b/examples/django/django_async_app/django_async_app/testing/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_async_app/django_async_app/testing/__init__.py +++ b/examples/django/django_async_app/django_async_app/testing/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_async_app/django_async_app/testing/admin.py b/examples/django/django_async_app/django_async_app/testing/admin.py index b111777..56e199a 100644 --- a/examples/django/django_async_app/django_async_app/testing/admin.py +++ b/examples/django/django_async_app/django_async_app/testing/admin.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.contrib import admin diff --git a/examples/django/django_async_app/django_async_app/testing/apps.py b/examples/django/django_async_app/django_async_app/testing/apps.py index f435e0c..6eb0613 100644 --- a/examples/django/django_async_app/django_async_app/testing/apps.py +++ b/examples/django/django_async_app/django_async_app/testing/apps.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.apps import AppConfig diff --git a/examples/django/django_async_app/django_async_app/testing/migrations/__init__.py b/examples/django/django_async_app/django_async_app/testing/migrations/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_async_app/django_async_app/testing/migrations/__init__.py +++ b/examples/django/django_async_app/django_async_app/testing/migrations/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_async_app/django_async_app/testing/models.py b/examples/django/django_async_app/django_async_app/testing/models.py index dde0a81..d709735 100644 --- a/examples/django/django_async_app/django_async_app/testing/models.py +++ b/examples/django/django_async_app/django_async_app/testing/models.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.db import models diff --git a/examples/django/django_async_app/django_async_app/testing/tests.py b/examples/django/django_async_app/django_async_app/testing/tests.py index 922bda5..b62faeb 100644 --- a/examples/django/django_async_app/django_async_app/testing/tests.py +++ b/examples/django/django_async_app/django_async_app/testing/tests.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.test import TestCase diff --git a/examples/django/django_async_app/django_async_app/testing/views.py b/examples/django/django_async_app/django_async_app/testing/views.py index 149129c..56bb7ec 100644 --- a/examples/django/django_async_app/django_async_app/testing/views.py +++ b/examples/django/django_async_app/django_async_app/testing/views.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
import time diff --git a/examples/django/django_async_app/rockcraft.yaml b/examples/django/django_async_app/rockcraft.yaml index 98b9e97..0c65bc0 100644 --- a/examples/django/django_async_app/rockcraft.yaml +++ b/examples/django/django_async_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: django-async-app diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py index cc727ed..b31b018 100644 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py index b2079a0..9a63aa2 100644 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py index 0a05d9f..86e13e8 100644 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py index 4c27f8f..e3bced4 100644 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """ diff --git a/examples/django/django_tracing_app/django_tracing_app/manage.py b/examples/django/django_tracing_app/django_tracing_app/manage.py index 78d1aca..cf833a2 100755 --- a/examples/django/django_tracing_app/django_tracing_app/manage.py +++ b/examples/django/django_tracing_app/django_tracing_app/manage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Django's command-line utility for administrative tasks.""" diff --git a/examples/django/django_tracing_app/django_tracing_app/migrate.sh b/examples/django/django_tracing_app/django_tracing_app/migrate.sh index ce3a73c..6aaeec6 100644 --- a/examples/django/django_tracing_app/django_tracing_app/migrate.sh +++ b/examples/django/django_tracing_app/django_tracing_app/migrate.sh @@ -1,5 +1,5 @@ #!/usr/bin/bash -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. python3 manage.py migrate diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py b/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/admin.py b/examples/django/django_tracing_app/django_tracing_app/testing/admin.py index b111777..56e199a 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/admin.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/admin.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.contrib import admin diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/apps.py b/examples/django/django_tracing_app/django_tracing_app/testing/apps.py index f435e0c..6eb0613 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/apps.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/apps.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.apps import AppConfig diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py b/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py index e3979c0..dddb292 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/models.py b/examples/django/django_tracing_app/django_tracing_app/testing/models.py index dde0a81..d709735 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/models.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/models.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from django.db import models diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/tests.py b/examples/django/django_tracing_app/django_tracing_app/testing/tests.py index 922bda5..b62faeb 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/tests.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/tests.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
from django.test import TestCase diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/views.py b/examples/django/django_tracing_app/django_tracing_app/testing/views.py index 991b30c..ca83a1b 100644 --- a/examples/django/django_tracing_app/django_tracing_app/testing/views.py +++ b/examples/django/django_tracing_app/django_tracing_app/testing/views.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import os diff --git a/examples/django/django_tracing_app/rockcraft.yaml b/examples/django/django_tracing_app/rockcraft.yaml index 7834eac..3b165bd 100644 --- a/examples/django/django_tracing_app/rockcraft.yaml +++ b/examples/django/django_tracing_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: django-tracing-app diff --git a/examples/fastapi/charm/charmcraft.yaml b/examples/fastapi/charm/charmcraft.yaml index fc5d4df..63ca113 100644 --- a/examples/fastapi/charm/charmcraft.yaml +++ b/examples/fastapi/charm/charmcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: fastapi-k8s @@ -50,9 +50,9 @@ config: app-secret-key-id: type: secret description: >- - This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual secret key. - To create the secret, run the following command: + This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual secret key. + To create the secret, run the following command: `juju add-secret my-secret-key value= && juju grant-secret my-secret-key fastapi-k8s`, and use the outputted secret ID to configure this option. user-defined-config: diff --git a/examples/fastapi/charm/lib/charms/saml_integrator/v0/saml.py b/examples/fastapi/charm/lib/charms/saml_integrator/v0/saml.py index 8fd1610..9012093 100644 --- a/examples/fastapi/charm/lib/charms/saml_integrator/v0/saml.py +++ b/examples/fastapi/charm/lib/charms/saml_integrator/v0/saml.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # Licensed under the Apache2.0. See LICENSE file in charm source for details. """Library to manage the relation data for the SAML Integrator charm. diff --git a/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 1dd78b5..e64c468 100644 --- a/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/examples/fastapi/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """## Overview. diff --git a/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py b/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py index 407cfb5..582a31f 100644 --- a/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py +++ b/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. r"""# Interface Library for ingress. 
diff --git a/examples/fastapi/charm/src/charm.py b/examples/fastapi/charm/src/charm.py index 63b54c1..80b2fe0 100755 --- a/examples/fastapi/charm/src/charm.py +++ b/examples/fastapi/charm/src/charm.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """FastAPI Charm service.""" diff --git a/examples/fastapi/fastapi_app/alembic.ini b/examples/fastapi/fastapi_app/alembic.ini index 353b047..c08a8b6 100644 --- a/examples/fastapi/fastapi_app/alembic.ini +++ b/examples/fastapi/fastapi_app/alembic.ini @@ -1,4 +1,4 @@ -; Copyright 2024 Canonical Ltd. +; Copyright 2025 Canonical Ltd. ; See LICENSE file for licensing details. [alembic] diff --git a/examples/fastapi/fastapi_app/alembic/env.py b/examples/fastapi/fastapi_app/alembic/env.py index 63e0974..4d8d851 100644 --- a/examples/fastapi/fastapi_app/alembic/env.py +++ b/examples/fastapi/fastapi_app/alembic/env.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import os diff --git a/examples/fastapi/fastapi_app/alembic/versions/eca6177bd16a_initial_migration.py b/examples/fastapi/fastapi_app/alembic/versions/eca6177bd16a_initial_migration.py index 18158e6..7838caf 100644 --- a/examples/fastapi/fastapi_app/alembic/versions/eca6177bd16a_initial_migration.py +++ b/examples/fastapi/fastapi_app/alembic/versions/eca6177bd16a_initial_migration.py @@ -1,10 +1,10 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Initial migration Revision ID: eca6177bd16a -Revises: +Revises: Create Date: 2023-09-05 17:12:56.303534 """ diff --git a/examples/fastapi/fastapi_app/app.py b/examples/fastapi/fastapi_app/app.py index 5f46f85..7ff5861 100644 --- a/examples/fastapi/fastapi_app/app.py +++ b/examples/fastapi/fastapi_app/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import os diff --git a/examples/fastapi/fastapi_app/migrate.sh b/examples/fastapi/fastapi_app/migrate.sh index 1a91ca1..18ad323 100644 --- a/examples/fastapi/fastapi_app/migrate.sh +++ b/examples/fastapi/fastapi_app/migrate.sh @@ -1,5 +1,5 @@ #! /usr/bin/env bash -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. alembic upgrade head diff --git a/examples/fastapi/fastapi_app/rockcraft.yaml b/examples/fastapi/fastapi_app/rockcraft.yaml index ecdcab3..a7942d4 100644 --- a/examples/fastapi/fastapi_app/rockcraft.yaml +++ b/examples/fastapi/fastapi_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: fastapi-app diff --git a/examples/fastapi/fastapi_tracing_app/alembic.ini b/examples/fastapi/fastapi_tracing_app/alembic.ini index 353b047..c08a8b6 100644 --- a/examples/fastapi/fastapi_tracing_app/alembic.ini +++ b/examples/fastapi/fastapi_tracing_app/alembic.ini @@ -1,4 +1,4 @@ -; Copyright 2024 Canonical Ltd. +; Copyright 2025 Canonical Ltd. ; See LICENSE file for licensing details. [alembic] diff --git a/examples/fastapi/fastapi_tracing_app/alembic/env.py b/examples/fastapi/fastapi_tracing_app/alembic/env.py index 63e0974..4d8d851 100644 --- a/examples/fastapi/fastapi_tracing_app/alembic/env.py +++ b/examples/fastapi/fastapi_tracing_app/alembic/env.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. 
# See LICENSE file for licensing details. import os diff --git a/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py b/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py index 18158e6..7838caf 100644 --- a/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py +++ b/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py @@ -1,10 +1,10 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Initial migration Revision ID: eca6177bd16a -Revises: +Revises: Create Date: 2023-09-05 17:12:56.303534 """ diff --git a/examples/fastapi/fastapi_tracing_app/app.py b/examples/fastapi/fastapi_tracing_app/app.py index 996a9cb..e41fa18 100644 --- a/examples/fastapi/fastapi_tracing_app/app.py +++ b/examples/fastapi/fastapi_tracing_app/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import os diff --git a/examples/fastapi/fastapi_tracing_app/migrate.sh b/examples/fastapi/fastapi_tracing_app/migrate.sh index 1a91ca1..18ad323 100644 --- a/examples/fastapi/fastapi_tracing_app/migrate.sh +++ b/examples/fastapi/fastapi_tracing_app/migrate.sh @@ -1,5 +1,5 @@ #! /usr/bin/env bash -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. alembic upgrade head diff --git a/examples/fastapi/fastapi_tracing_app/rockcraft.yaml b/examples/fastapi/fastapi_tracing_app/rockcraft.yaml index 088e5e1..572a091 100644 --- a/examples/fastapi/fastapi_tracing_app/rockcraft.yaml +++ b/examples/fastapi/fastapi_tracing_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: fastapi-tracing-app diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index fb93d5e..7fc326f 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: flask-k8s summary: Turn any Flask application into a charm. diff --git a/examples/flask/lib/charms/saml_integrator/v0/saml.py b/examples/flask/lib/charms/saml_integrator/v0/saml.py index be555dc..722b1c6 100644 --- a/examples/flask/lib/charms/saml_integrator/v0/saml.py +++ b/examples/flask/lib/charms/saml_integrator/v0/saml.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # Licensed under the Apache2.0. See LICENSE file in charm source for details. """Library to manage the relation data for the SAML Integrator charm. diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 4516af6..363828d 100644 --- a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """## Overview. diff --git a/examples/flask/lib/charms/traefik_k8s/v2/ingress.py b/examples/flask/lib/charms/traefik_k8s/v2/ingress.py index 407cfb5..582a31f 100644 --- a/examples/flask/lib/charms/traefik_k8s/v2/ingress.py +++ b/examples/flask/lib/charms/traefik_k8s/v2/ingress.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. 
+# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. r"""# Interface Library for ingress. diff --git a/examples/flask/src/charm.py b/examples/flask/src/charm.py index 1274e26..d10edb4 100755 --- a/examples/flask/src/charm.py +++ b/examples/flask/src/charm.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask Charm service.""" diff --git a/examples/flask/test_async_rock/app.py b/examples/flask/test_async_rock/app.py index ec6131a..09e0930 100644 --- a/examples/flask/test_async_rock/app.py +++ b/examples/flask/test_async_rock/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import logging diff --git a/examples/flask/test_async_rock/rockcraft.yaml b/examples/flask/test_async_rock/rockcraft.yaml index 9c789eb..f2b0ee2 100644 --- a/examples/flask/test_async_rock/rockcraft.yaml +++ b/examples/flask/test_async_rock/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: test-async-flask summary: A flask async worker test app diff --git a/examples/flask/test_db_rock/alembic.ini b/examples/flask/test_db_rock/alembic.ini index 353b047..c08a8b6 100644 --- a/examples/flask/test_db_rock/alembic.ini +++ b/examples/flask/test_db_rock/alembic.ini @@ -1,4 +1,4 @@ -; Copyright 2024 Canonical Ltd. +; Copyright 2025 Canonical Ltd. ; See LICENSE file for licensing details. [alembic] diff --git a/examples/flask/test_db_rock/alembic/env.py b/examples/flask/test_db_rock/alembic/env.py index 63e0974..4d8d851 100644 --- a/examples/flask/test_db_rock/alembic/env.py +++ b/examples/flask/test_db_rock/alembic/env.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import os diff --git a/examples/flask/test_db_rock/alembic/versions/eca6177bd16a_initial_migration.py b/examples/flask/test_db_rock/alembic/versions/eca6177bd16a_initial_migration.py index 18158e6..7838caf 100644 --- a/examples/flask/test_db_rock/alembic/versions/eca6177bd16a_initial_migration.py +++ b/examples/flask/test_db_rock/alembic/versions/eca6177bd16a_initial_migration.py @@ -1,10 +1,10 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Initial migration Revision ID: eca6177bd16a -Revises: +Revises: Create Date: 2023-09-05 17:12:56.303534 """ diff --git a/examples/flask/test_db_rock/app.py b/examples/flask/test_db_rock/app.py index 866e989..b2dba5d 100644 --- a/examples/flask/test_db_rock/app.py +++ b/examples/flask/test_db_rock/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import os diff --git a/examples/flask/test_db_rock/migrate.sh b/examples/flask/test_db_rock/migrate.sh index 1a91ca1..18ad323 100644 --- a/examples/flask/test_db_rock/migrate.sh +++ b/examples/flask/test_db_rock/migrate.sh @@ -1,5 +1,5 @@ #! /usr/bin/env bash -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. alembic upgrade head diff --git a/examples/flask/test_db_rock/rockcraft.yaml b/examples/flask/test_db_rock/rockcraft.yaml index 5fc0dfb..bc3fefa 100644 --- a/examples/flask/test_db_rock/rockcraft.yaml +++ b/examples/flask/test_db_rock/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. 
+# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: test-db-flask summary: Default Flask application image. diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index 7d0b087..9527532 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import logging diff --git a/examples/flask/test_rock/rockcraft.yaml b/examples/flask/test_rock/rockcraft.yaml index 8dcdf56..0e8eafa 100644 --- a/examples/flask/test_rock/rockcraft.yaml +++ b/examples/flask/test_rock/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: test-flask summary: A flask test app diff --git a/examples/flask/test_tracing_rock/app.py b/examples/flask/test_tracing_rock/app.py index 57bafba..9e8e117 100644 --- a/examples/flask/test_tracing_rock/app.py +++ b/examples/flask/test_tracing_rock/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import logging diff --git a/examples/flask/test_tracing_rock/rockcraft.yaml b/examples/flask/test_tracing_rock/rockcraft.yaml index ec722b8..6a7f269 100644 --- a/examples/flask/test_tracing_rock/rockcraft.yaml +++ b/examples/flask/test_tracing_rock/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: test-tracing-flask summary: A flask test app diff --git a/examples/go/charm/charmcraft.yaml b/examples/go/charm/charmcraft.yaml index 06bd01a..90e2ac2 100644 --- a/examples/go/charm/charmcraft.yaml +++ b/examples/go/charm/charmcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: go-k8s @@ -41,9 +41,9 @@ config: app-secret-key-id: type: secret description: >- - This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. - The secret should contain a single key, "value", which maps to the actual secret key. - To create the secret, run the following command: + This configuration is similar to `app-secret-key`, but instead accepts a Juju user secret ID. + The secret should contain a single key, "value", which maps to the actual secret key. + To create the secret, run the following command: `juju add-secret my-secret-key value= && juju grant-secret my-secret-key go-k8s`, and use the outputted secret ID to configure this option. user-defined-config: diff --git a/examples/go/charm/lib/charms/saml_integrator/v0/saml.py b/examples/go/charm/lib/charms/saml_integrator/v0/saml.py index be555dc..722b1c6 100644 --- a/examples/go/charm/lib/charms/saml_integrator/v0/saml.py +++ b/examples/go/charm/lib/charms/saml_integrator/v0/saml.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # Licensed under the Apache2.0. See LICENSE file in charm source for details. """Library to manage the relation data for the SAML Integrator charm. 
diff --git a/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 1dd78b5..e64c468 100644 --- a/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/examples/go/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """## Overview. diff --git a/examples/go/charm/lib/charms/traefik_k8s/v2/ingress.py b/examples/go/charm/lib/charms/traefik_k8s/v2/ingress.py index 407cfb5..582a31f 100644 --- a/examples/go/charm/lib/charms/traefik_k8s/v2/ingress.py +++ b/examples/go/charm/lib/charms/traefik_k8s/v2/ingress.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. r"""# Interface Library for ingress. diff --git a/examples/go/charm/src/charm.py b/examples/go/charm/src/charm.py index 0336590..8770395 100755 --- a/examples/go/charm/src/charm.py +++ b/examples/go/charm/src/charm.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Go Charm service.""" diff --git a/examples/go/go_app/go.mod b/examples/go/go_app/go.mod index 1f3436b..6bd1996 100644 --- a/examples/go/go_app/go.mod +++ b/examples/go/go_app/go.mod @@ -1,4 +1,4 @@ -// Copyright 2024 Canonical Ltd. +// Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. module go-app diff --git a/examples/go/go_app/internal/service/service.go b/examples/go/go_app/internal/service/service.go index 9426d64..ed53d24 100644 --- a/examples/go/go_app/internal/service/service.go +++ b/examples/go/go_app/internal/service/service.go @@ -1,4 +1,4 @@ -// Copyright 2024 Canonical Ltd. +// Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. package service diff --git a/examples/go/go_app/main.go b/examples/go/go_app/main.go index 4c751c1..ce92162 100644 --- a/examples/go/go_app/main.go +++ b/examples/go/go_app/main.go @@ -1,4 +1,4 @@ -// Copyright 2024 Canonical Ltd. +// Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. package main diff --git a/examples/go/go_app/migrate.sh b/examples/go/go_app/migrate.sh index 5a0f850..80b4034 100755 --- a/examples/go/go_app/migrate.sh +++ b/examples/go/go_app/migrate.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. PGPASSWORD="${POSTGRESQL_DB_PASSWORD}" psql -h "${POSTGRESQL_DB_HOSTNAME}" -U "${POSTGRESQL_DB_USERNAME}" "${POSTGRESQL_DB_NAME}" -c "CREATE TABLE IF NOT EXISTS USERS(NAME CHAR(50));" diff --git a/examples/go/go_app/rockcraft.yaml b/examples/go/go_app/rockcraft.yaml index 714bf66..3c94c17 100644 --- a/examples/go/go_app/rockcraft.yaml +++ b/examples/go/go_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: go-app # see https://documentation.ubuntu.com/rockcraft/en/stable/explanation/bases/ diff --git a/examples/go/go_tracing_app/go.mod b/examples/go/go_tracing_app/go.mod index 2a01308..5b7da97 100644 --- a/examples/go/go_tracing_app/go.mod +++ b/examples/go/go_tracing_app/go.mod @@ -1,4 +1,4 @@ -// Copyright 2024 Canonical Ltd. +// Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. 
module go-tracing-app diff --git a/examples/go/go_tracing_app/internal/service/service.go b/examples/go/go_tracing_app/internal/service/service.go index e01b557..d7d9115 100644 --- a/examples/go/go_tracing_app/internal/service/service.go +++ b/examples/go/go_tracing_app/internal/service/service.go @@ -1,4 +1,4 @@ -// Copyright 2024 Canonical Ltd. +// Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. package service diff --git a/examples/go/go_tracing_app/main.go b/examples/go/go_tracing_app/main.go index 1852985..c4ab87f 100644 --- a/examples/go/go_tracing_app/main.go +++ b/examples/go/go_tracing_app/main.go @@ -1,4 +1,4 @@ -// Copyright 2024 Canonical Ltd. +// Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. package main diff --git a/examples/go/go_tracing_app/migrate.sh b/examples/go/go_tracing_app/migrate.sh index 5a0f850..80b4034 100755 --- a/examples/go/go_tracing_app/migrate.sh +++ b/examples/go/go_tracing_app/migrate.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. PGPASSWORD="${POSTGRESQL_DB_PASSWORD}" psql -h "${POSTGRESQL_DB_HOSTNAME}" -U "${POSTGRESQL_DB_USERNAME}" "${POSTGRESQL_DB_NAME}" -c "CREATE TABLE IF NOT EXISTS USERS(NAME CHAR(50));" diff --git a/examples/go/go_tracing_app/rockcraft.yaml b/examples/go/go_tracing_app/rockcraft.yaml index c4b8f29..5488285 100644 --- a/examples/go/go_tracing_app/rockcraft.yaml +++ b/examples/go/go_tracing_app/rockcraft.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. name: go-tracing-app # see https://documentation.ubuntu.com/rockcraft/en/stable/explanation/bases/ diff --git a/localstack-installation.sh b/localstack-installation.sh index ff83900..b902402 100755 --- a/localstack-installation.sh +++ b/localstack-installation.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. pip install pip --upgrade diff --git a/pyproject.toml b/pyproject.toml index ee2c452..e8e2bd1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. [project] name = "paas-charm" diff --git a/setup.py b/setup.py index 3649250..0b374f7 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from setuptools import setup diff --git a/src/paas_app_charmer/__init__.py b/src/paas_app_charmer/__init__.py index 10d681f..4001c98 100644 --- a/src/paas_app_charmer/__init__.py +++ b/src/paas_app_charmer/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated entrypoints for charms. diff --git a/src/paas_app_charmer/django/__init__.py b/src/paas_app_charmer/django/__init__.py index 0a40a7f..2dcb9cc 100644 --- a/src/paas_app_charmer/django/__init__.py +++ b/src/paas_app_charmer/django/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated Django module entrypoint. 
diff --git a/src/paas_app_charmer/django/charm.py b/src/paas_app_charmer/django/charm.py index e27606d..9b06646 100644 --- a/src/paas_app_charmer/django/charm.py +++ b/src/paas_app_charmer/django/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated Django Charm service. diff --git a/src/paas_app_charmer/fastapi/__init__.py b/src/paas_app_charmer/fastapi/__init__.py index 619b31a..c9b5c60 100644 --- a/src/paas_app_charmer/fastapi/__init__.py +++ b/src/paas_app_charmer/fastapi/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated FastAPI module entrypoint. diff --git a/src/paas_app_charmer/fastapi/charm.py b/src/paas_app_charmer/fastapi/charm.py index 4c00de8..dd87078 100644 --- a/src/paas_app_charmer/fastapi/charm.py +++ b/src/paas_app_charmer/fastapi/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/src/paas_app_charmer/flask/__init__.py b/src/paas_app_charmer/flask/__init__.py index e67d19a..f7faa51 100644 --- a/src/paas_app_charmer/flask/__init__.py +++ b/src/paas_app_charmer/flask/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated Flask module entrypoint. diff --git a/src/paas_app_charmer/flask/charm.py b/src/paas_app_charmer/flask/charm.py index 6dd97fe..617174f 100644 --- a/src/paas_app_charmer/flask/charm.py +++ b/src/paas_app_charmer/flask/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated Flask Charm service. diff --git a/src/paas_app_charmer/go/__init__.py b/src/paas_app_charmer/go/__init__.py index 3eade92..97744b8 100644 --- a/src/paas_app_charmer/go/__init__.py +++ b/src/paas_app_charmer/go/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated Go module entrypoint. diff --git a/src/paas_app_charmer/go/charm.py b/src/paas_app_charmer/go/charm.py index ad8a3fa..e6ab63a 100644 --- a/src/paas_app_charmer/go/charm.py +++ b/src/paas_app_charmer/go/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Deprecated Go Charm service. diff --git a/src/paas_charm/__init__.py b/src/paas_charm/__init__.py index 2457d3c..681e2ec 100644 --- a/src/paas_charm/__init__.py +++ b/src/paas_charm/__init__.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Module __init__.""" diff --git a/src/paas_charm/_gunicorn/__init__.py b/src/paas_charm/_gunicorn/__init__.py index e3979c0..dddb292 100644 --- a/src/paas_charm/_gunicorn/__init__.py +++ b/src/paas_charm/_gunicorn/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index 33c7e4e..a7d41bf 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""The base charm class for all charms.""" diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index 92b5b2f..d34c2ac 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Provide the GunicornWebserver class to represent the gunicorn server.""" diff --git a/src/paas_charm/_gunicorn/workload_config.py b/src/paas_charm/_gunicorn/workload_config.py index f0e8d0c..b9d007a 100644 --- a/src/paas_charm/_gunicorn/workload_config.py +++ b/src/paas_charm/_gunicorn/workload_config.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """This module defines the WorloadConfig class which represents configuration for the workload.""" diff --git a/src/paas_charm/_gunicorn/wsgi_app.py b/src/paas_charm/_gunicorn/wsgi_app.py index f6e3e20..cc41fcc 100644 --- a/src/paas_charm/_gunicorn/wsgi_app.py +++ b/src/paas_charm/_gunicorn/wsgi_app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Provide the WsgiApp class to represent the WSGI application.""" diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index c1b362f..499e64b 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Provide the base generic class to represent the application.""" diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 5c53363..0ce861b 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """The base charm class for all application charms.""" diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 5d83524..0ea2622 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """This module defines the CharmState class which represents the state of the charm.""" diff --git a/src/paas_charm/charm_utils.py b/src/paas_charm/charm_utils.py index a8afb98..f95402b 100644 --- a/src/paas_charm/charm_utils.py +++ b/src/paas_charm/charm_utils.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """This module defines utility functions to use by the Charm.""" diff --git a/src/paas_charm/database_migration.py b/src/paas_charm/database_migration.py index 27eda9c..3e63287 100644 --- a/src/paas_charm/database_migration.py +++ b/src/paas_charm/database_migration.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Provide the DatabaseMigration class to manage database migrations.""" diff --git a/src/paas_charm/databases.py b/src/paas_charm/databases.py index 8dd9689..d57621d 100644 --- a/src/paas_charm/databases.py +++ b/src/paas_charm/databases.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Provide the Databases class to handle database relations and state.""" diff --git a/src/paas_charm/django/__init__.py b/src/paas_charm/django/__init__.py index e95aa88..ea115f7 100644 --- a/src/paas_charm/django/__init__.py +++ b/src/paas_charm/django/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Django module entrypoint.""" diff --git a/src/paas_charm/django/charm.py b/src/paas_charm/django/charm.py index bcf7371..cdc4cbf 100644 --- a/src/paas_charm/django/charm.py +++ b/src/paas_charm/django/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Django Charm service.""" diff --git a/src/paas_charm/exceptions.py b/src/paas_charm/exceptions.py index 1fc561e..eba94a1 100644 --- a/src/paas_charm/exceptions.py +++ b/src/paas_charm/exceptions.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Exceptions used by charms.""" diff --git a/src/paas_charm/fastapi/__init__.py b/src/paas_charm/fastapi/__init__.py index 6758ba1..b9e839a 100644 --- a/src/paas_charm/fastapi/__init__.py +++ b/src/paas_charm/fastapi/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """FastAPI module entrypoint.""" diff --git a/src/paas_charm/fastapi/charm.py b/src/paas_charm/fastapi/charm.py index c13f2a3..9a0ec31 100644 --- a/src/paas_charm/fastapi/charm.py +++ b/src/paas_charm/fastapi/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """FastAPI Charm service.""" diff --git a/src/paas_charm/flask/__init__.py b/src/paas_charm/flask/__init__.py index d603d8c..fdabae2 100644 --- a/src/paas_charm/flask/__init__.py +++ b/src/paas_charm/flask/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask module entrypoint.""" diff --git a/src/paas_charm/flask/charm.py b/src/paas_charm/flask/charm.py index bd20ab9..f3258f4 100644 --- a/src/paas_charm/flask/charm.py +++ b/src/paas_charm/flask/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask Charm service.""" diff --git a/src/paas_charm/framework.py b/src/paas_charm/framework.py index f173d20..2f3cb0a 100644 --- a/src/paas_charm/framework.py +++ b/src/paas_charm/framework.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Framework related base classes.""" diff --git a/src/paas_charm/go/__init__.py b/src/paas_charm/go/__init__.py index 62ab56c..59c0fa4 100644 --- a/src/paas_charm/go/__init__.py +++ b/src/paas_charm/go/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Go module entrypoint.""" diff --git a/src/paas_charm/go/charm.py b/src/paas_charm/go/charm.py index 92b18f8..f86fd20 100644 --- a/src/paas_charm/go/charm.py +++ b/src/paas_charm/go/charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Go Charm service.""" diff --git a/src/paas_charm/observability.py b/src/paas_charm/observability.py index 0ee39f7..c58f20e 100644 --- a/src/paas_charm/observability.py +++ b/src/paas_charm/observability.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Provide the Observability class to represent the observability stack for charms.""" diff --git a/src/paas_charm/rabbitmq.py b/src/paas_charm/rabbitmq.py index 41ff4af..f2dca89 100644 --- a/src/paas_charm/rabbitmq.py +++ b/src/paas_charm/rabbitmq.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """RabbitMQ library for handling the rabbitmq interface. diff --git a/src/paas_charm/secret_storage.py b/src/paas_charm/secret_storage.py index a39656a..b22a59f 100644 --- a/src/paas_charm/secret_storage.py +++ b/src/paas_charm/secret_storage.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Provide the SecretStorage for managing the persistent secret storage for charms.""" diff --git a/src/paas_charm/utils.py b/src/paas_charm/utils.py index 5197045..7d41b86 100644 --- a/src/paas_charm/utils.py +++ b/src/paas_charm/utils.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Generic utility functions.""" diff --git a/tests/__init__.py b/tests/__init__.py index 289a524..073ccfc 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Tests module.""" diff --git a/tests/conftest.py b/tests/conftest.py index d733b20..f6f32b0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Global fixtures and utilities for integration and unit tests.""" diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 124616d..5e885f7 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import json diff --git a/tests/integration/django/__init__.py b/tests/integration/django/__init__.py index e3979c0..dddb292 100644 --- a/tests/integration/django/__init__.py +++ b/tests/integration/django/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/integration/django/conftest.py b/tests/integration/django/conftest.py index ec953ab..343fb2c 100644 --- a/tests/integration/django/conftest.py +++ b/tests/integration/django/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Fixtures for flask charm integration tests.""" diff --git a/tests/integration/django/test_django.py b/tests/integration/django/test_django.py index 7048d66..42ed618 100644 --- a/tests/integration/django/test_django.py +++ b/tests/integration/django/test_django.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Integration tests for Django charm.""" diff --git a/tests/integration/django/test_django_integrations.py b/tests/integration/django/test_django_integrations.py index a592d54..2f3e898 100644 --- a/tests/integration/django/test_django_integrations.py +++ b/tests/integration/django/test_django_integrations.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Django charm integrations.""" diff --git a/tests/integration/django/test_workers.py b/tests/integration/django/test_workers.py index 5f82bb3..e38ce5b 100644 --- a/tests/integration/django/test_workers.py +++ b/tests/integration/django/test_workers.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Django charm.""" diff --git a/tests/integration/fastapi/__init__.py b/tests/integration/fastapi/__init__.py index e3979c0..dddb292 100644 --- a/tests/integration/fastapi/__init__.py +++ b/tests/integration/fastapi/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/integration/fastapi/conftest.py b/tests/integration/fastapi/conftest.py index a4ecac1..c27523d 100644 --- a/tests/integration/fastapi/conftest.py +++ b/tests/integration/fastapi/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Fixtures for FastAPI charm integration tests.""" diff --git a/tests/integration/fastapi/test_fastapi.py b/tests/integration/fastapi/test_fastapi.py index b2273bb..00235fd 100644 --- a/tests/integration/fastapi/test_fastapi.py +++ b/tests/integration/fastapi/test_fastapi.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for FastAPI charm.""" diff --git a/tests/integration/flask/__init__.py b/tests/integration/flask/__init__.py index e3979c0..dddb292 100644 --- a/tests/integration/flask/__init__.py +++ b/tests/integration/flask/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index bc65076..20cb7c3 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Fixtures for flask charm integration tests.""" diff --git a/tests/integration/flask/test_charm.py b/tests/integration/flask/test_charm.py index a5788e5..6019ea6 100644 --- a/tests/integration/flask/test_charm.py +++ b/tests/integration/flask/test_charm.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask charm.""" diff --git a/tests/integration/flask/test_cos.py b/tests/integration/flask/test_cos.py index 2f1e41b..dc78659 100644 --- a/tests/integration/flask/test_cos.py +++ b/tests/integration/flask/test_cos.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
"""Integration tests for Flask charm COS integration.""" diff --git a/tests/integration/flask/test_database.py b/tests/integration/flask/test_database.py index 9785830..c15f33d 100644 --- a/tests/integration/flask/test_database.py +++ b/tests/integration/flask/test_database.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask charm database integration.""" diff --git a/tests/integration/flask/test_db_migration.py b/tests/integration/flask/test_db_migration.py index 89c13cf..6cbfd2c 100644 --- a/tests/integration/flask/test_db_migration.py +++ b/tests/integration/flask/test_db_migration.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask charm database integration.""" diff --git a/tests/integration/flask/test_integrations.py b/tests/integration/flask/test_integrations.py index 4b343fe..ff7b1e5 100644 --- a/tests/integration/flask/test_integrations.py +++ b/tests/integration/flask/test_integrations.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask charm integrations, like S3 and Saml.""" diff --git a/tests/integration/flask/test_proxy.py b/tests/integration/flask/test_proxy.py index 52e8047..a7efbda 100644 --- a/tests/integration/flask/test_proxy.py +++ b/tests/integration/flask/test_proxy.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask charm proxy setting.""" diff --git a/tests/integration/flask/test_workers.py b/tests/integration/flask/test_workers.py index 323992c..626a823 100644 --- a/tests/integration/flask/test_workers.py +++ b/tests/integration/flask/test_workers.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask workers and schedulers.""" diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 06a4c7d..56a6a26 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Integration tests for Flask workers and schedulers.""" diff --git a/tests/integration/go/__init__.py b/tests/integration/go/__init__.py index e3979c0..dddb292 100644 --- a/tests/integration/go/__init__.py +++ b/tests/integration/go/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/integration/go/conftest.py b/tests/integration/go/conftest.py index d28408d..e564046 100644 --- a/tests/integration/go/conftest.py +++ b/tests/integration/go/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Fixtures for go charm integration tests.""" diff --git a/tests/integration/go/test_go.py b/tests/integration/go/test_go.py index 2953491..98da98e 100644 --- a/tests/integration/go/test_go.py +++ b/tests/integration/go/test_go.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. 
# See LICENSE file for licensing details. """Integration tests for Go charm.""" diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index e772f6a..974efbe 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. import io diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index e3979c0..dddb292 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/unit/django/__init__.py b/tests/unit/django/__init__.py index e3979c0..dddb292 100644 --- a/tests/unit/django/__init__.py +++ b/tests/unit/django/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/unit/django/conftest.py b/tests/unit/django/conftest.py index 9fbb487..736cb66 100644 --- a/tests/unit/django/conftest.py +++ b/tests/unit/django/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """pytest fixtures for the integration test.""" diff --git a/tests/unit/django/constants.py b/tests/unit/django/constants.py index 4829ade..b78c34f 100644 --- a/tests/unit/django/constants.py +++ b/tests/unit/django/constants.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. DEFAULT_LAYER = { diff --git a/tests/unit/django/test_charm.py b/tests/unit/django/test_charm.py index 88554ed..355c13a 100644 --- a/tests/unit/django/test_charm.py +++ b/tests/unit/django/test_charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Django charm unit tests.""" diff --git a/tests/unit/django/test_workers.py b/tests/unit/django/test_workers.py index e4c605b..3fc3a5a 100644 --- a/tests/unit/django/test_workers.py +++ b/tests/unit/django/test_workers.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Unit tests for worker services.""" diff --git a/tests/unit/fastapi/__init__.py b/tests/unit/fastapi/__init__.py index e3979c0..dddb292 100644 --- a/tests/unit/fastapi/__init__.py +++ b/tests/unit/fastapi/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/unit/fastapi/conftest.py b/tests/unit/fastapi/conftest.py index 8f02658..b181616 100644 --- a/tests/unit/fastapi/conftest.py +++ b/tests/unit/fastapi/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """pytest fixtures for the fastapi unit test.""" diff --git a/tests/unit/fastapi/constants.py b/tests/unit/fastapi/constants.py index ea7e550..b614c2f 100644 --- a/tests/unit/fastapi/constants.py +++ b/tests/unit/fastapi/constants.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. DEFAULT_LAYER = { diff --git a/tests/unit/fastapi/test_charm.py b/tests/unit/fastapi/test_charm.py index bae427c..96211b7 100644 --- a/tests/unit/fastapi/test_charm.py +++ b/tests/unit/fastapi/test_charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. 
+# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """FastAPI charm unit tests.""" diff --git a/tests/unit/flask/__init__.py b/tests/unit/flask/__init__.py index e3979c0..dddb292 100644 --- a/tests/unit/flask/__init__.py +++ b/tests/unit/flask/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/unit/flask/conftest.py b/tests/unit/flask/conftest.py index cd4d210..374be2f 100644 --- a/tests/unit/flask/conftest.py +++ b/tests/unit/flask/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """pytest fixtures for the integration test.""" diff --git a/tests/unit/flask/constants.py b/tests/unit/flask/constants.py index 68756a6..285ea32 100644 --- a/tests/unit/flask/constants.py +++ b/tests/unit/flask/constants.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. from secrets import token_hex diff --git a/tests/unit/flask/test_charm.py b/tests/unit/flask/test_charm.py index 5acf5cf..bd72614 100644 --- a/tests/unit/flask/test_charm.py +++ b/tests/unit/flask/test_charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask charm unit tests.""" diff --git a/tests/unit/flask/test_charm_state.py b/tests/unit/flask/test_charm_state.py index 88a5416..c59a6c7 100644 --- a/tests/unit/flask/test_charm_state.py +++ b/tests/unit/flask/test_charm_state.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask charm state unit tests.""" diff --git a/tests/unit/flask/test_database_migration.py b/tests/unit/flask/test_database_migration.py index c7e4ab4..68a60d2 100644 --- a/tests/unit/flask/test_database_migration.py +++ b/tests/unit/flask/test_database_migration.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Unit tests for Flask charm database integration.""" diff --git a/tests/unit/flask/test_databases.py b/tests/unit/flask/test_databases.py index 9b31ceb..53f28bc 100644 --- a/tests/unit/flask/test_databases.py +++ b/tests/unit/flask/test_databases.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask charm database relations unit tests.""" diff --git a/tests/unit/flask/test_flask_app.py b/tests/unit/flask/test_flask_app.py index a647a48..4c5879e 100644 --- a/tests/unit/flask/test_flask_app.py +++ b/tests/unit/flask/test_flask_app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask charm unit tests for the flask_app module.""" diff --git a/tests/unit/flask/test_tracing.py b/tests/unit/flask/test_tracing.py index 9403571..b15428a 100644 --- a/tests/unit/flask/test_tracing.py +++ b/tests/unit/flask/test_tracing.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask charm tracing relation unit tests.""" diff --git a/tests/unit/flask/test_webserver.py b/tests/unit/flask/test_webserver.py index dd54d78..3117a7a 100644 --- a/tests/unit/flask/test_webserver.py +++ b/tests/unit/flask/test_webserver.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. 
+# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Flask charm unit tests for the webserver module.""" diff --git a/tests/unit/flask/test_workers.py b/tests/unit/flask/test_workers.py index 9e0d077..5ceb26b 100644 --- a/tests/unit/flask/test_workers.py +++ b/tests/unit/flask/test_workers.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Unit tests for worker services.""" diff --git a/tests/unit/go/__init__.py b/tests/unit/go/__init__.py index e3979c0..dddb292 100644 --- a/tests/unit/go/__init__.py +++ b/tests/unit/go/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. diff --git a/tests/unit/go/conftest.py b/tests/unit/go/conftest.py index a7bfd0c..8868bf4 100644 --- a/tests/unit/go/conftest.py +++ b/tests/unit/go/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """pytest fixtures for the go unit test.""" diff --git a/tests/unit/go/constants.py b/tests/unit/go/constants.py index 2f097ba..cee33b5 100644 --- a/tests/unit/go/constants.py +++ b/tests/unit/go/constants.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. DEFAULT_LAYER = { diff --git a/tests/unit/go/test_app.py b/tests/unit/go/test_app.py index 70eaa77..7ff6149 100644 --- a/tests/unit/go/test_app.py +++ b/tests/unit/go/test_app.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Go charm unit tests for the generic app module.""" diff --git a/tests/unit/go/test_charm.py b/tests/unit/go/test_charm.py index 0f532fa..ebdfddc 100644 --- a/tests/unit/go/test_charm.py +++ b/tests/unit/go/test_charm.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Go charm unit tests.""" diff --git a/tests/unit/test_deprecated.py b/tests/unit/test_deprecated.py index 8a352f8..c150c07 100644 --- a/tests/unit/test_deprecated.py +++ b/tests/unit/test_deprecated.py @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. """Test deprecated Charm entrypoints.""" diff --git a/tox.ini b/tox.ini index dcd43f0..9f1dd56 100644 --- a/tox.ini +++ b/tox.ini @@ -1,4 +1,4 @@ -# Copyright 2024 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
[tox] From 47f1de34bbc71ae759ff180ea3569056e2e87bb0 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 10 Jan 2025 12:05:45 +0300 Subject: [PATCH 27/70] chore(test): Fix test --- tests/integration/flask/conftest.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index 20cb7c3..29d6484 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -159,7 +159,7 @@ async def flask_tracing_app_fixture(build_charm: str, model: Model, test_tracing return app -async def deploy_and_configure_minio(ops_test: OpsTest) -> None: +async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" config = { "access-key": "accesskey", @@ -169,7 +169,7 @@ async def deploy_and_configure_minio(ops_test: OpsTest) -> None: await ops_test.model.wait_for_idle( apps=["minio"], status="active", timeout=2000, idle_period=45 ) - minio_addr = await unit_address(ops_test, "minio", 0) + minio_addr = (await get_unit_ips("minio"))[0] mc_client = Minio( f"{minio_addr}:9000", @@ -200,7 +200,7 @@ async def deploy_and_configure_minio(ops_test: OpsTest) -> None: @pytest_asyncio.fixture(scope="module", name="tempo_app") -async def deploy_tempo_cluster(ops_test: OpsTest): +async def deploy_tempo_cluster(ops_test: OpsTest, get_unit_ips): """Deploys tempo in its HA version together with minio and s3-integrator.""" tempo_app = "tempo" worker_app = "tempo-worker" @@ -220,7 +220,7 @@ async def deploy_tempo_cluster(ops_test: OpsTest): await ops_test.model.integrate(tempo_app + ":s3", "s3-integrator" + ":s3-credentials") await ops_test.model.integrate(tempo_app + ":tempo-cluster", worker_app + ":tempo-cluster") - await deploy_and_configure_minio(ops_test) + await deploy_and_configure_minio(ops_test, get_unit_ips) async with ops_test.fast_forward(): await ops_test.model.wait_for_idle( apps=[tempo_app, worker_app, "s3-integrator"], From f7e5225eb261e8b3bb91dff05627e010157638f3 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 13 Jan 2025 14:18:35 +0300 Subject: [PATCH 28/70] chore(test): Fix tracing integration test --- .../data_platform_libs/v0/data_interfaces.py | 530 ++++++++++++++---- tests/integration/flask/conftest.py | 14 +- .../flask/test_workload_tracing.py | 30 +- tests/integration/helpers.py | 9 +- tox.ini | 1 + 5 files changed, 440 insertions(+), 144 deletions(-) diff --git a/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py b/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py index b331bdc..3bc2dd8 100644 --- a/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -331,10 +331,14 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 36 +LIBPATCH = 40 PYDEPS = ["ops>=2.0.0"] +# Starting from what LIBPATCH number to apply legacy solutions +# v0.17 was the last version without secrets +LEGACY_SUPPORT_FROM = 17 + logger = logging.getLogger(__name__) Diff = namedtuple("Diff", "added changed deleted") @@ -351,36 +355,16 @@ def _on_topic_requested(self, event: TopicRequestedEvent): GROUP_MAPPING_FIELD = "secret_group_mapping" GROUP_SEPARATOR = "@" +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + 
"no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} -class SecretGroup(str): - """Secret groups specific type.""" - - -class SecretGroupsAggregate(str): - """Secret groups with option to extend with additional constants.""" - - def __init__(self): - self.USER = SecretGroup("user") - self.TLS = SecretGroup("tls") - self.EXTRA = SecretGroup("extra") - - def __setattr__(self, name, value): - """Setting internal constants.""" - if name in self.__dict__: - raise RuntimeError("Can't set constant!") - else: - super().__setattr__(name, SecretGroup(value)) - - def groups(self) -> list: - """Return the list of stored SecretGroups.""" - return list(self.__dict__.values()) - - def get_group(self, group: str) -> Optional[SecretGroup]: - """If the input str translates to a group name, return that.""" - return SecretGroup(group) if group in self.groups() else None - -SECRET_GROUPS = SecretGroupsAggregate() +############################################################################## +# Exceptions +############################################################################## class DataInterfacesError(Exception): @@ -407,6 +391,19 @@ class IllegalOperationError(DataInterfacesError): """To be used when an operation is not allowed to be performed.""" +class PrematureDataAccessError(DataInterfacesError): + """To be raised when the Relation Data may be accessed (written) before protocol init complete.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + +############################################################################## +# Databag handling and comparison methods +############################################################################## + + def get_encoded_dict( relation: Relation, member: Union[Unit, Application], field: str ) -> Optional[Dict[str, str]]: @@ -482,6 +479,11 @@ def diff(event: RelationChangedEvent, bucket: Optional[Union[Unit, Application]] return Diff(added, changed, deleted) +############################################################################## +# Module decorators +############################################################################## + + def leader_only(f): """Decorator to ensure that only leader can perform given operation.""" @@ -536,6 +538,36 @@ def wrapper(self, *args, **kwargs): return wrapper +def legacy_apply_from_version(version: int) -> Callable: + """Decorator to decide whether to apply a legacy function or not. + + Based on LEGACY_SUPPORT_FROM module variable value, the importer charm may only want + to apply legacy solutions starting from a specific LIBPATCH. + + NOTE: All 'legacy' functions have to be defined and called in a way that they return `None`. + This results in cleaner and more secure execution flows in case the function may be disabled. + This requirement implicitly means that legacy functions change the internal state strictly, + don't return information. 
+ """ + + def decorator(f: Callable[..., None]): + """Signature is ensuring None return value.""" + f.legacy_version = version + + def wrapper(self, *args, **kwargs) -> None: + if version >= LEGACY_SUPPORT_FROM: + return f(self, *args, **kwargs) + + return wrapper + + return decorator + + +############################################################################## +# Helper classes +############################################################################## + + class Scope(Enum): """Peer relations scope.""" @@ -543,9 +575,35 @@ class Scope(Enum): UNIT = "unit" -################################################################################ -# Secrets internal caching -################################################################################ +class SecretGroup(str): + """Secret groups specific type.""" + + +class SecretGroupsAggregate(str): + """Secret groups with option to extend with additional constants.""" + + def __init__(self): + self.USER = SecretGroup("user") + self.TLS = SecretGroup("tls") + self.EXTRA = SecretGroup("extra") + + def __setattr__(self, name, value): + """Setting internal constants.""" + if name in self.__dict__: + raise RuntimeError("Can't set constant!") + else: + super().__setattr__(name, SecretGroup(value)) + + def groups(self) -> list: + """Return the list of stored SecretGroups.""" + return list(self.__dict__.values()) + + def get_group(self, group: str) -> Optional[SecretGroup]: + """If the input str translates to a group name, return that.""" + return SecretGroup(group) if group in self.groups() else None + + +SECRET_GROUPS = SecretGroupsAggregate() class CachedSecret: @@ -554,6 +612,8 @@ class CachedSecret: The data structure is precisely re-using/simulating as in the actual Secret Storage """ + KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + def __init__( self, model: Model, @@ -571,6 +631,95 @@ def __init__( self.legacy_labels = legacy_labels self.current_label = None + @property + def meta(self) -> Optional[Secret]: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + self._legacy_compat_find_secret_by_old_label() + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + + ########################################################################## + # Backwards compatibility / Upgrades + ########################################################################## + # These functions are used to keep backwards compatibility on rolling upgrades + # Policy: + # All data is kept intact until the first write operation. (This allows a minimal + # grace period during which rollbacks are fully safe. For more info see the spec.) + # All data involves: + # - databag contents + # - secrets content + # - secret labels (!!!) 
+ # Legacy functions must return None, and leave an equally consistent state whether + # they are executed or skipped (as a high enough versioned execution environment may + # not require so) + + # Compatibility + + @legacy_apply_from_version(34) + def _legacy_compat_find_secret_by_old_label(self) -> None: + """Compatibility function, allowing to find a secret by a legacy label. + + This functionality is typically needed when secret labels changed over an upgrade. + Until the first write operation, we need to maintain data as it was, including keeping + the old secret label. In order to keep track of the old label currently used to access + the secret, and additional 'current_label' field is being defined. + """ + for label in self.legacy_labels: + try: + self._secret_meta = self._model.get_secret(label=label) + except SecretNotFoundError: + pass + else: + if label != self.label: + self.current_label = label + return + + # Migrations + + @legacy_apply_from_version(34) + def _legacy_migration_to_new_label_if_needed(self) -> None: + """Helper function to re-create the secret with a different label. + + Juju does not provide a way to change secret labels. + Thus whenever moving from secrets version that involves secret label changes, + we "re-create" the existing secret, and attach the new label to the new + secret, to be used from then on. + + Note: we replace the old secret with a new one "in place", as we can't + easily switch the containing SecretCache structure to point to a new secret. + Instead we are changing the 'self' (CachedSecret) object to point to the + new instance. + """ + if not self.current_label or not (self.meta and self._secret_meta): + return + + # Create a new secret with the new label + content = self._secret_meta.get_content() + self._secret_uri = None + + # It will be nice to have the possibility to check if we are the owners of the secret... 
+ try: + self._secret_meta = self.add_secret(content, label=self.label) + except ModelError as err: + if MODEL_ERRORS["not_leader"] not in str(err): + raise + self.current_label = None + + ########################################################################## + # Public functions + ########################################################################## + def add_secret( self, content: Dict[str, str], @@ -593,28 +742,6 @@ def add_secret( self._secret_meta = secret return self._secret_meta - @property - def meta(self) -> Optional[Secret]: - """Getting cached secret meta-information.""" - if not self._secret_meta: - if not (self._secret_uri or self.label): - return - - for label in [self.label] + self.legacy_labels: - try: - self._secret_meta = self._model.get_secret(label=label) - except SecretNotFoundError: - pass - else: - if label != self.label: - self.current_label = label - break - - # If still not found, to be checked by URI, to be labelled with the proposed label - if not self._secret_meta and self._secret_uri: - self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) - return self._secret_meta - def get_content(self) -> Dict[str, str]: """Getting cached secret content.""" if not self._secret_content: @@ -624,42 +751,25 @@ def get_content(self) -> Dict[str, str]: except (ValueError, ModelError) as err: # https://bugs.launchpad.net/juju/+bug/2042596 # Only triggered when 'refresh' is set - known_model_errors = [ - "ERROR either URI or label should be used for getting an owned secret but not both", - "ERROR secret owner cannot use --refresh", - ] if isinstance(err, ModelError) and not any( - msg in str(err) for msg in known_model_errors + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS ): raise # Due to: ValueError: Secret owner cannot use refresh=True self._secret_content = self.meta.get_content() return self._secret_content - def _move_to_new_label_if_needed(self): - """Helper function to re-create the secret with a different label.""" - if not self.current_label or not (self.meta and self._secret_meta): - return - - # Create a new secret with the new label - content = self._secret_meta.get_content() - self._secret_uri = None - - # I wish we could just check if we are the owners of the secret... - try: - self._secret_meta = self.add_secret(content, label=self.label) - except ModelError as err: - if "this unit is not the leader" not in str(err): - raise - self.current_label = None - def set_content(self, content: Dict[str, str]) -> None: """Setting cached secret content.""" if not self.meta: return + # DPE-4182: do not create new revision if the content stay the same + if content == self.get_content(): + return + if content: - self._move_to_new_label_if_needed() + self._legacy_migration_to_new_label_if_needed() self.meta.set_content(content) self._secret_content = content else: @@ -922,6 +1032,23 @@ def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" raise NotImplementedError + # Optional overrides + + def _legacy_apply_on_fetch(self) -> None: + """This function should provide a list of compatibility functions to be applied when fetching (legacy) data.""" + pass + + def _legacy_apply_on_update(self, fields: List[str]) -> None: + """This function should provide a list of compatibility functions to be applied when writing data. + + Since data may be at a legacy version, migration may be mandatory. 
+ """ + pass + + def _legacy_apply_on_delete(self, fields: List[str]) -> None: + """This function should provide a list of compatibility functions to be applied when deleting (legacy) data.""" + pass + # Internal helper methods @staticmethod @@ -1174,6 +1301,16 @@ def get_relation(self, relation_name, relation_id) -> Relation: return relation + def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: + """Get the secret URI for the corresponding group.""" + secret_field = self._generate_secret_field_name(group) + return relation.data[self.component].get(secret_field) + + def set_secret_uri(self, relation: Relation, group: SecretGroup, secret_uri: str) -> None: + """Set the secret URI for the corresponding group.""" + secret_field = self._generate_secret_field_name(group) + relation.data[self.component][secret_field] = secret_uri + def fetch_relation_data( self, relation_ids: Optional[List[int]] = None, @@ -1190,6 +1327,8 @@ def fetch_relation_data( a dict of the values stored in the relation data bag for all relation instances (indexed by the relation ID). """ + self._legacy_apply_on_fetch() + if not relation_name: relation_name = self.relation_name @@ -1228,6 +1367,8 @@ def fetch_my_relation_data( NOTE: Since only the leader can read the relation's 'this_app'-side Application databag, the functionality is limited to leaders """ + self._legacy_apply_on_fetch() + if not relation_name: relation_name = self.relation_name @@ -1259,6 +1400,8 @@ def fetch_my_relation_field( @leader_only def update_relation_data(self, relation_id: int, data: dict) -> None: """Update the data within the relation.""" + self._legacy_apply_on_update(list(data.keys())) + relation_name = self.relation_name relation = self.get_relation(relation_name, relation_id) return self._update_relation_data(relation, data) @@ -1266,6 +1409,8 @@ def update_relation_data(self, relation_id: int, data: dict) -> None: @leader_only def delete_relation_data(self, relation_id: int, fields: List[str]) -> None: """Remove field from the relation.""" + self._legacy_apply_on_delete(fields) + relation_name = self.relation_name relation = self.get_relation(relation_name, relation_id) return self._delete_relation_data(relation, fields) @@ -1312,6 +1457,8 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: class ProviderData(Data): """Base provides-side of the data products relation.""" + RESOURCE_FIELD = "database" + def __init__( self, model: Model, @@ -1332,8 +1479,7 @@ def _add_relation_secret( uri_to_databag=True, ) -> bool: """Add a new Juju Secret that will be registered in the relation databag.""" - secret_field = self._generate_secret_field_name(group_mapping) - if uri_to_databag and relation.data[self.component].get(secret_field): + if uri_to_databag and self.get_secret_uri(relation, group_mapping): logging.error("Secret for relation %s already exists, not adding again", relation.id) return False @@ -1344,7 +1490,7 @@ def _add_relation_secret( # According to lint we may not have a Secret ID if uri_to_databag and secret.meta and secret.meta.id: - relation.data[self.component][secret_field] = secret.meta.id + self.set_secret_uri(relation, group_mapping, secret.meta.id) # Return the content that was added return True @@ -1445,8 +1591,7 @@ def _get_relation_secret( if not relation: return - secret_field = self._generate_secret_field_name(group_mapping) - if secret_uri := relation.data[self.local_app].get(secret_field): + if secret_uri := self.get_secret_uri(relation, group_mapping): return 
self.secrets.get(label, secret_uri) def _fetch_specific_relation_data( @@ -1479,6 +1624,15 @@ def _fetch_my_specific_relation_data( def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Set values for fields not caring whether it's a secret or not.""" req_secret_fields = [] + + keys = set(data.keys()) + if self.fetch_relation_field(relation.id, self.RESOURCE_FIELD) is None and ( + keys - {"endpoints", "read-only-endpoints", "replset"} + ): + raise PrematureDataAccessError( + "Premature access to relation data, update is forbidden before the connection is initialized." + ) + if relation.app: req_secret_fields = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) @@ -1599,11 +1753,10 @@ def _register_secrets_to_relation(self, relation: Relation, params_name_list: Li for group in SECRET_GROUPS.groups(): secret_field = self._generate_secret_field_name(group) - if secret_field in params_name_list: - if secret_uri := relation.data[relation.app].get(secret_field): - self._register_secret_to_relation( - relation.name, relation.id, secret_uri, group - ) + if secret_field in params_name_list and ( + secret_uri := self.get_secret_uri(relation, group) + ): + self._register_secret_to_relation(relation.name, relation.id, secret_uri, group) def _is_resource_created_for_relation(self, relation: Relation) -> bool: if not relation.app: @@ -1614,6 +1767,17 @@ def _is_resource_created_for_relation(self, relation: Relation) -> bool: ) return bool(data.get("username")) and bool(data.get("password")) + # Public functions + + def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: + """Getting relation secret URI for the corresponding Secret Group.""" + secret_field = self._generate_secret_field_name(group) + return relation.data[relation.app].get(secret_field) + + def set_secret_uri(self, relation: Relation, group: SecretGroup, uri: str) -> None: + """Setting relation secret URI is not possible for a Requirer.""" + raise NotImplementedError("Requirer can not change the relation secret URI.") + def is_resource_created(self, relation_id: Optional[int] = None) -> bool: """Check if the resource has been created. 
@@ -1764,7 +1928,6 @@ def __init__( secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, ): - """Manager of base client relations.""" RequirerData.__init__( self, model, @@ -1775,6 +1938,11 @@ def __init__( self.secret_field_name = secret_field_name if secret_field_name else self.SECRET_FIELD_NAME self.deleted_label = deleted_label self._secret_label_map = {} + + # Legacy information holders + self._legacy_labels = [] + self._legacy_secret_uri = None + # Secrets that are being dynamically added within the scope of this event handler run self._new_secrets = [] self._additional_secret_group_mapping = additional_secret_group_mapping @@ -1849,10 +2017,12 @@ def set_secret( value: The string value of the secret group_mapping: The name of the "secret group", in case the field is to be added to an existing secret """ + self._legacy_apply_on_update([field]) + full_field = self._field_to_internal_name(field, group_mapping) if self.secrets_enabled and full_field not in self.current_secret_fields: self._new_secrets.append(full_field) - if self._no_group_with_databag(field, full_field): + if self.valid_field_pattern(field, full_field): self.update_relation_data(relation_id, {full_field: value}) # Unlike for set_secret(), there's no harm using this operation with static secrets @@ -1865,6 +2035,8 @@ def get_secret( group_mapping: Optional[SecretGroup] = None, ) -> Optional[str]: """Public interface method to fetch secrets only.""" + self._legacy_apply_on_fetch() + full_field = self._field_to_internal_name(field, group_mapping) if ( self.secrets_enabled @@ -1872,7 +2044,7 @@ def get_secret( and field not in self.current_secret_fields ): return - if self._no_group_with_databag(field, full_field): + if self.valid_field_pattern(field, full_field): return self.fetch_my_relation_field(relation_id, full_field) @dynamic_secrets_only @@ -1883,14 +2055,19 @@ def delete_secret( group_mapping: Optional[SecretGroup] = None, ) -> Optional[str]: """Public interface method to delete secrets only.""" + self._legacy_apply_on_delete([field]) + full_field = self._field_to_internal_name(field, group_mapping) if self.secrets_enabled and full_field not in self.current_secret_fields: logger.warning(f"Secret {field} from group {group_mapping} was not found") return - if self._no_group_with_databag(field, full_field): + + if self.valid_field_pattern(field, full_field): self.delete_relation_data(relation_id, [full_field]) + ########################################################################## # Helpers + ########################################################################## @staticmethod def _field_to_internal_name(field: str, group: Optional[SecretGroup]) -> str: @@ -1932,10 +2109,69 @@ def _content_for_secret_group( if k in self.secret_fields } - # Backwards compatibility + def valid_field_pattern(self, field: str, full_field: str) -> bool: + """Check that no secret group is attempted to be used together without secrets being enabled. + + Secrets groups are impossible to use with versions that are not yet supporting secrets. + """ + if not self.secrets_enabled and full_field != field: + logger.error( + f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." 
+ ) + return False + return True + + ########################################################################## + # Backwards compatibility / Upgrades + ########################################################################## + # These functions are used to keep backwards compatibility on upgrades + # Policy: + # All data is kept intact until the first write operation. (This allows a minimal + # grace period during which rollbacks are fully safe. For more info see spec.) + # All data involves: + # - databag + # - secrets content + # - secret labels (!!!) + # Legacy functions must return None, and leave an equally consistent state whether + # they are executed or skipped (as a high enough versioned execution environment may + # not require so) + + # Full legacy stack for each operation + + def _legacy_apply_on_fetch(self) -> None: + """All legacy functions to be applied on fetch.""" + relation = self._model.relations[self.relation_name][0] + self._legacy_compat_generate_prev_labels() + self._legacy_compat_secret_uri_from_databag(relation) + + def _legacy_apply_on_update(self, fields) -> None: + """All legacy functions to be applied on update.""" + relation = self._model.relations[self.relation_name][0] + self._legacy_compat_generate_prev_labels() + self._legacy_compat_secret_uri_from_databag(relation) + self._legacy_migration_remove_secret_from_databag(relation, fields) + self._legacy_migration_remove_secret_field_name_from_databag(relation) + + def _legacy_apply_on_delete(self, fields) -> None: + """All legacy functions to be applied on delete.""" + relation = self._model.relations[self.relation_name][0] + self._legacy_compat_generate_prev_labels() + self._legacy_compat_secret_uri_from_databag(relation) + self._legacy_compat_check_deleted_label(relation, fields) + + # Compatibility + + @legacy_apply_from_version(18) + def _legacy_compat_check_deleted_label(self, relation, fields) -> None: + """Helper function for legacy behavior. + + As long as https://bugs.launchpad.net/juju/+bug/2028094 wasn't fixed, + we did not delete fields but rather kept them in the secret with a string value + expressing invalidity. This function is maintainnig that behavior when needed. + """ + if not self.deleted_label: + return - def _check_deleted_label(self, relation, fields) -> None: - """Helper function for legacy behavior.""" current_data = self.fetch_my_relation_data([relation.id], fields) if current_data is not None: # Check if the secret we wanna delete actually exists @@ -1948,7 +2184,43 @@ def _check_deleted_label(self, relation, fields) -> None: ", ".join(non_existent), ) - def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: + @legacy_apply_from_version(18) + def _legacy_compat_secret_uri_from_databag(self, relation) -> None: + """Fetching the secret URI from the databag, in case stored there.""" + self._legacy_secret_uri = relation.data[self.component].get( + self._generate_secret_field_name(), None + ) + + @legacy_apply_from_version(34) + def _legacy_compat_generate_prev_labels(self) -> None: + """Generator for legacy secret label names, for backwards compatibility. + + Secret label is part of the data that MUST be maintained across rolling upgrades. + In case there may be a change on a secret label, the old label must be recognized + after upgrades, and left intact until the first write operation -- when we roll over + to the new label. + + This function keeps "memory" of previously used secret labels. 
+ NOTE: Return value takes decorator into account -- all 'legacy' functions may return `None` + + v0.34 (rev69): Fixing issue https://github.com/canonical/data-platform-libs/issues/155 + meant moving from '.' (i.e. 'mysql.app', 'mysql.unit') + to labels '..' (like 'peer.mysql.app') + """ + if self._legacy_labels: + return + + result = [] + members = [self._model.app.name] + if self.scope: + members.append(self.scope.value) + result.append(f"{'.'.join(members)}") + self._legacy_labels = result + + # Migration + + @legacy_apply_from_version(18) + def _legacy_migration_remove_secret_from_databag(self, relation, fields: List[str]) -> None: """For Rolling Upgrades -- when moving from databag to secrets usage. Practically what happens here is to remove stuff from the databag that is @@ -1962,10 +2234,16 @@ def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: if self._fetch_relation_data_without_secrets(self.component, relation, [field]): self._delete_relation_data_without_secrets(self.component, relation, [field]) - def _remove_secret_field_name_from_databag(self, relation) -> None: + @legacy_apply_from_version(18) + def _legacy_migration_remove_secret_field_name_from_databag(self, relation) -> None: """Making sure that the old databag URI is gone. This action should not be executed more than once. + + There was a phase (before moving secrets usage to libs) when charms saved the peer + secret URI to the databag, and used this URI from then on to retrieve their secret. + When upgrading to charm versions using this library, we need to add a label to the + secret and access it via label from than on, and remove the old traces from the databag. """ # Nothing to do if 'internal-secret' is not in the databag if not (relation.data[self.component].get(self._generate_secret_field_name())): @@ -1981,25 +2259,9 @@ def _remove_secret_field_name_from_databag(self, relation) -> None: # Databag reference to the secret URI can be removed, now that it's labelled relation.data[self.component].pop(self._generate_secret_field_name(), None) - def _previous_labels(self) -> List[str]: - """Generator for legacy secret label names, for backwards compatibility.""" - result = [] - members = [self._model.app.name] - if self.scope: - members.append(self.scope.value) - result.append(f"{'.'.join(members)}") - return result - - def _no_group_with_databag(self, field: str, full_field: str) -> bool: - """Check that no secret group is attempted to be used together with databag.""" - if not self.secrets_enabled and full_field != field: - logger.error( - f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." 
- ) - return False - return True - + ########################################################################## # Event handlers + ########################################################################## def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the relation has changed.""" @@ -2009,7 +2271,9 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: """Event emitted when the secret has changed.""" pass + ########################################################################## # Overrides of Relation Data handling functions + ########################################################################## def _generate_secret_label( self, relation_name: str, relation_id: int, group_mapping: SecretGroup @@ -2046,13 +2310,14 @@ def _get_relation_secret( return label = self._generate_secret_label(relation_name, relation_id, group_mapping) - secret_uri = relation.data[self.component].get(self._generate_secret_field_name(), None) # URI or legacy label is only to applied when moving single legacy secret to a (new) label if group_mapping == SECRET_GROUPS.EXTRA: # Fetching the secret with fallback to URI (in case label is not yet known) # Label would we "stuck" on the secret in case it is found - return self.secrets.get(label, secret_uri, legacy_labels=self._previous_labels()) + return self.secrets.get( + label, self._legacy_secret_uri, legacy_labels=self._legacy_labels + ) return self.secrets.get(label) def _get_group_secret_contents( @@ -2082,7 +2347,6 @@ def _fetch_my_specific_relation_data( @either_static_or_dynamic_secrets def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" - self._remove_secret_from_databag(relation, list(data.keys())) _, normal_fields = self._process_secret_fields( relation, self.secret_fields, @@ -2091,7 +2355,6 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non data=data, uri_to_databag=False, ) - self._remove_secret_field_name_from_databag(relation) normal_content = {k: v for k, v in data.items() if k in normal_fields} self._update_relation_data_without_secrets(self.component, relation, normal_content) @@ -2100,8 +2363,6 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" if self.secret_fields and self.deleted_label: - # Legacy, backwards compatibility - self._check_deleted_label(relation, fields) _, normal_fields = self._process_secret_fields( relation, @@ -2137,7 +2398,9 @@ def fetch_relation_field( "fetch_my_relation_data() and fetch_my_relation_field()" ) + ########################################################################## # Public functions -- inherited + ########################################################################## fetch_my_relation_data = Data.fetch_my_relation_data fetch_my_relation_field = Data.fetch_my_relation_field @@ -2602,6 +2865,14 @@ def set_version(self, relation_id: int, version: str) -> None: """ self.update_relation_data(relation_id, {"version": version}) + def set_subordinated(self, relation_id: int) -> None: + """Raises the subordinated flag in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. 
+ """ + self.update_relation_data(relation_id, {"subordinated": "true"}) + class DatabaseProviderEventHandlers(EventHandlers): """Provider-side of the database relation handlers.""" @@ -2838,6 +3109,21 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data: + return + + if remote_unit_data.get("state") != "ready": + return + # Check which data has changed to emit customs events. diff = self._diff(event) @@ -3019,6 +3305,8 @@ class KafkaRequiresEvents(CharmEvents): class KafkaProviderData(ProviderData): """Provider-side of the Kafka relation.""" + RESOURCE_FIELD = "topic" + def __init__(self, model: Model, relation_name: str) -> None: super().__init__(model, relation_name) @@ -3268,6 +3556,8 @@ class OpenSearchRequiresEvents(CharmEvents): class OpenSearchProvidesData(ProviderData): """Provider-side of the OpenSearch relation.""" + RESOURCE_FIELD = "index" + def __init__(self, model: Model, relation_name: str) -> None: super().__init__(model, relation_name) diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index 20cb7c3..8d4021e 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -6,7 +6,7 @@ import os import pathlib from secrets import token_hex - +from minio import Minio import boto3 import pytest import pytest_asyncio @@ -159,17 +159,17 @@ async def flask_tracing_app_fixture(build_charm: str, model: Model, test_tracing return app -async def deploy_and_configure_minio(ops_test: OpsTest) -> None: +async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" config = { "access-key": "accesskey", "secret-key": "secretkey", } - await ops_test.model.deploy("minio", channel="edge", trust=True, config=config) + minio_app = await ops_test.model.deploy("minio", channel="edge", trust=True, config=config) await ops_test.model.wait_for_idle( - apps=["minio"], status="active", timeout=2000, idle_period=45 + apps=[minio_app.name], status="active", timeout=2000, idle_period=45 ) - minio_addr = await unit_address(ops_test, "minio", 0) + minio_addr = (await get_unit_ips(minio_app.name))[0] mc_client = Minio( f"{minio_addr}:9000", @@ -200,7 +200,7 @@ async def deploy_and_configure_minio(ops_test: OpsTest) -> None: @pytest_asyncio.fixture(scope="module", name="tempo_app") -async def deploy_tempo_cluster(ops_test: OpsTest): +async def deploy_tempo_cluster(ops_test: OpsTest, get_unit_ips): """Deploys tempo in its HA version together with minio and s3-integrator.""" tempo_app = "tempo" worker_app = "tempo-worker" @@ -220,7 +220,7 @@ async def deploy_tempo_cluster(ops_test: OpsTest): await ops_test.model.integrate(tempo_app + ":s3", "s3-integrator" + ":s3-credentials") await ops_test.model.integrate(tempo_app + ":tempo-cluster", worker_app + ":tempo-cluster") - await deploy_and_configure_minio(ops_test) + await deploy_and_configure_minio(ops_test, 
get_unit_ips) async with ops_test.fast_forward(): await ops_test.model.wait_for_idle( apps=[tempo_app, worker_app, "s3-integrator"], diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 56a6a26..92edf79 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -6,13 +6,13 @@ import asyncio import logging import time +import json import aiohttp import pytest from juju.application import Application from juju.model import Model from pytest_operator.plugin import OpsTest - logger = logging.getLogger(__name__) @@ -39,22 +39,26 @@ async def test_workload_tracing( ) # the flask unit is not important. Take the first one flask_unit_ip = (await get_unit_ips(flask_tracing_app.name))[0] + tempo_host = (await get_unit_ips(tempo_app.name))[0] async def _fetch_page(session): - params = {"duration": 2} - async with session.get(f"http://{flask_unit_ip}:8000", params=params) as response: + async with session.get(f"http://{flask_unit_ip}:8000") as response: return await response.text() + async def _fetch_trace(session): + async with session.get( + f"http://{tempo_host}:3200/api/search?tags=service.name={flask_tracing_app.name}") as response: + text = await response.text() + return json.loads(text)["traces"] + async with aiohttp.ClientSession() as session: - page = _fetch_page(session) - await asyncio.gather([page]) + pages = [_fetch_page(session) for _ in range(5)] + await asyncio.gather(*pages) - print("--------------------------") - print(f"{flask_tracing_app.name}-app") - print("--------------------------") + # wait a little for traces to register + time.sleep(5) # verify workload traces are ingested into Tempo - assert await get_traces_patiently( - await get_application_ip(ops_test, tempo_app.name), - service_name=f"{flask_tracing_app.name}-app", - tls=False, - ) + async with aiohttp.ClientSession() as session: + pages = [_fetch_trace(session) for _ in range(5)] + traces = await asyncio.gather(*pages) + assert len(traces) > 0 diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 974efbe..c7d324f 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -6,6 +6,7 @@ import pathlib import uuid import zipfile +import json import requests import yaml @@ -48,9 +49,9 @@ def inject_charm_config(charm: pathlib.Path | str, config: dict, tmp_dir: pathli return str(charm) -def get_traces(tempo_host: str, service_name: str, tls=False): +def get_traces(tempo_host: str, service_name: str): """Get traces directly from Tempo REST API.""" - url = f"{'https' if tls else 'http'}://{tempo_host}:3200/api/search?tags=service.name={service_name}" + url = f"http://{tempo_host}:3200/api/search?tags=service.name={service_name}" req = requests.get( url, verify=False, @@ -61,12 +62,12 @@ def get_traces(tempo_host: str, service_name: str, tls=False): @retry(stop=stop_after_attempt(15), wait=wait_exponential(multiplier=1, min=4, max=10)) -async def get_traces_patiently(tempo_host, service_name="tracegen-otlp_http", tls=False): +async def get_traces_patiently(tempo_host, service_name="tracegen-otlp_http"): """Get traces directly from Tempo REST API, but also try multiple times. Useful for cases when Tempo might not return the traces immediately (its API is known for returning data in random order). 
""" - traces = get_traces(tempo_host, service_name=service_name, tls=tls) + traces = get_traces(tempo_host, service_name=service_name) assert len(traces) > 0 return traces diff --git a/tox.ini b/tox.ini index 9f1dd56..1e4fea3 100644 --- a/tox.ini +++ b/tox.ini @@ -111,6 +111,7 @@ deps = boto3 juju==3.5.2.1 git+https://github.com/canonical/saml-test-idp.git + minio -r{toxinidir}/requirements.txt -r{toxinidir}/tests/integration/flask/requirements.txt commands = From b372dcb04b358d795082d23f60ea06261322fb09 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 13 Jan 2025 14:22:56 +0300 Subject: [PATCH 29/70] chore(test): Merge --- tests/integration/flask/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index 22ed18d..8d4021e 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -159,7 +159,6 @@ async def flask_tracing_app_fixture(build_charm: str, model: Model, test_tracing return app -async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" config = { From 5b4c95eb211ed3c250772e949a51dda8ed44494c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 13 Jan 2025 14:27:21 +0300 Subject: [PATCH 30/70] chore(lint): Format code --- src/paas_charm/charm.py | 23 +++++++++++++------ tests/integration/flask/conftest.py | 3 ++- .../flask/test_workload_tracing.py | 6 +++-- tests/integration/helpers.py | 2 +- 4 files changed, 23 insertions(+), 11 deletions(-) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 0ce861b..0a275ed 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -159,10 +159,13 @@ def _init_redis(self, requires: dict[str, RelationMeta]) -> "RedisRequires | Non if "redis" in requires and requires["redis"].interface_name == "redis": try: _redis = RedisRequires(charm=self, relation_name="redis") - self.framework.observe(self.on.redis_relation_updated, self._on_redis_relation_updated) + self.framework.observe( + self.on.redis_relation_updated, self._on_redis_relation_updated + ) except NameError: logger.exception( - "Missing charm library, please run `charmcraft fetch-lib charms.redis_k8s.v0.redis`" + "Missing charm library, " + "please run `charmcraft fetch-lib charms.redis_k8s.v0.redis`" ) return _redis @@ -184,7 +187,8 @@ def _init_s3(self, requires: dict[str, RelationMeta]) -> "S3Requirer | None": self.framework.observe(_s3.on.credentials_gone, self._on_s3_credential_gone) except NameError: logger.exception( - "Missing charm library, please run `charmcraft fetch-lib charms.data_platform_libs.v0.s3`" + "Missing charm library, " + "please run `charmcraft fetch-lib charms.data_platform_libs.v0.s3`" ) return _s3 @@ -204,7 +208,8 @@ def _init_saml(self, requires: dict[str, RelationMeta]) -> "SamlRequires | None" self.framework.observe(_saml.on.saml_data_available, self._on_saml_data_available) except NameError: logger.exception( - "Missing charm library, please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`" + "Missing charm library, " + "please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`" ) return _saml @@ -246,9 +251,13 @@ def _init_tracing(self, requires: dict[str, RelationMeta]) -> "TracingEndpointRe _tracing = TracingEndpointRequirer( self, relation_name="tracing", protocols=["otlp_http"] ) - 
self.framework.observe(_tracing.on.endpoint_changed, self._on_tracing_relation_changed) - self.framework.observe(_tracing.on.endpoint_removed, self._on_tracing_relation_broken) - except NameError as e: + self.framework.observe( + _tracing.on.endpoint_changed, self._on_tracing_relation_changed + ) + self.framework.observe( + _tracing.on.endpoint_removed, self._on_tracing_relation_broken + ) + except NameError: logger.exception( "Missing charm library, please run " "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index 8d4021e..effb079 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -6,13 +6,14 @@ import os import pathlib from secrets import token_hex -from minio import Minio + import boto3 import pytest import pytest_asyncio from botocore.config import Config as BotoConfig from juju.application import Application from juju.model import Model +from minio import Minio from pytest import Config, FixtureRequest from pytest_operator.plugin import OpsTest diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 92edf79..4725afd 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -4,15 +4,16 @@ """Integration tests for Flask workers and schedulers.""" import asyncio +import json import logging import time -import json import aiohttp import pytest from juju.application import Application from juju.model import Model from pytest_operator.plugin import OpsTest + logger = logging.getLogger(__name__) @@ -47,7 +48,8 @@ async def _fetch_page(session): async def _fetch_trace(session): async with session.get( - f"http://{tempo_host}:3200/api/search?tags=service.name={flask_tracing_app.name}") as response: + f"http://{tempo_host}:3200/api/search?tags=service.name={flask_tracing_app.name}" + ) as response: text = await response.text() return json.loads(text)["traces"] diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index c7d324f..edb2708 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -2,11 +2,11 @@ # See LICENSE file for licensing details. 
import io +import json import os import pathlib import uuid import zipfile -import json import requests import yaml From 02624c59d0545507858bb2d3724f1bb36a67d366 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 07:11:00 +0300 Subject: [PATCH 31/70] chore(Format): Remove unnecessary files and format code --- tests/integration/flask/test_workload_tracing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 4725afd..91451b6 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -63,4 +63,5 @@ async def _fetch_trace(session): async with aiohttp.ClientSession() as session: pages = [_fetch_trace(session) for _ in range(5)] traces = await asyncio.gather(*pages) + print(traces) assert len(traces) > 0 From 068a3c139fb056861cb2b8e78b5329f7a7a74ce3 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 07:19:34 +0300 Subject: [PATCH 32/70] Chore(test): Fix unit test --- tests/unit/flask/test_tracing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/flask/test_tracing.py b/tests/unit/flask/test_tracing.py index b15428a..ba4dcb9 100644 --- a/tests/unit/flask/test_tracing.py +++ b/tests/unit/flask/test_tracing.py @@ -36,4 +36,4 @@ def test_tracing_relation(harness: Harness): service_env = container.get_plan().services["flask"].environment print(service_env) assert service_env["OTEL_EXPORTER_OTLP_ENDPOINT"] == "http://test-ip:4318" - assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s-app" + assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s" From a12f204ce8836e12ddf43d77859463579d77ca1c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 09:10:20 +0300 Subject: [PATCH 33/70] Chore(test): Fix false positive --- tests/integration/flask/test_workload_tracing.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 91451b6..0728c56 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -13,6 +13,7 @@ from juju.application import Application from juju.model import Model from pytest_operator.plugin import OpsTest +from tests.integration.helpers import get_traces_patiently logger = logging.getLogger(__name__) @@ -48,7 +49,7 @@ async def _fetch_page(session): async def _fetch_trace(session): async with session.get( - f"http://{tempo_host}:3200/api/search?tags=service.name={flask_tracing_app.name}" + f"http://{tempo_host}:3200/api/search?tags=service.name={flask_tracing_app.charm_name}" ) as response: text = await response.text() return json.loads(text)["traces"] @@ -57,11 +58,5 @@ async def _fetch_trace(session): pages = [_fetch_page(session) for _ in range(5)] await asyncio.gather(*pages) - # wait a little for traces to register - time.sleep(5) # verify workload traces are ingested into Tempo - async with aiohttp.ClientSession() as session: - pages = [_fetch_trace(session) for _ in range(5)] - traces = await asyncio.gather(*pages) - print(traces) - assert len(traces) > 0 + assert await get_traces_patiently(tempo_host, flask_tracing_app.charm_name) From af0c47cca25e03274f80a6c64691246e6983ac5c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 09:11:08 +0300 Subject: [PATCH 34/70] chore(format):format --- tests/integration/flask/test_workload_tracing.py | 1 + 1 file 
changed, 1 insertion(+) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 0728c56..5c5d476 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -13,6 +13,7 @@ from juju.application import Application from juju.model import Model from pytest_operator.plugin import OpsTest + from tests.integration.helpers import get_traces_patiently logger = logging.getLogger(__name__) From 707af93b0cdd4d92ce2f8817d2add3f1e0a37ae1 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 09:24:29 +0300 Subject: [PATCH 35/70] Chore(test): Skip if juju <=3.4 --- tests/integration/flask/test_workload_tracing.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 5c5d476..d925cbd 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -13,12 +13,19 @@ from juju.application import Application from juju.model import Model from pytest_operator.plugin import OpsTest - +from ops import JujuVersion from tests.integration.helpers import get_traces_patiently logger = logging.getLogger(__name__) +def _tempo_available(): + return JujuVersion.from_environ() < JujuVersion("3.4") + + +@pytest.mark.skipif( + _tempo_available(), reason="tempo needs juju>=3.4" +) @pytest.mark.usefixtures("flask_tracing_app") @pytest.mark.usefixtures("tempo_app") async def test_workload_tracing( From 1ca07aa2534351496f628f726ff17316b2284361 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 09:29:09 +0300 Subject: [PATCH 36/70] Chore(format): Format --- tests/integration/flask/test_workload_tracing.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index d925cbd..3c3296f 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -12,8 +12,9 @@ import pytest from juju.application import Application from juju.model import Model -from pytest_operator.plugin import OpsTest from ops import JujuVersion +from pytest_operator.plugin import OpsTest + from tests.integration.helpers import get_traces_patiently logger = logging.getLogger(__name__) @@ -23,9 +24,7 @@ def _tempo_available(): return JujuVersion.from_environ() < JujuVersion("3.4") -@pytest.mark.skipif( - _tempo_available(), reason="tempo needs juju>=3.4" -) +@pytest.mark.skipif(_tempo_available(), reason="tempo needs juju>=3.4") @pytest.mark.usefixtures("flask_tracing_app") @pytest.mark.usefixtures("tempo_app") async def test_workload_tracing( From f566767236229a04593ba7cb6a27622d05c2fc01 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 10:37:56 +0300 Subject: [PATCH 37/70] Chore(test): juju version skip fixed. 
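
Rework the Juju version gate for the workload tracing test. The environment-based
`JujuVersion.from_environ()` skipif is replaced by a `skip_juju_version` pytest marker,
registered via `pytest_configure` and enforced by an autouse fixture that compares the
marked minimum version against the model's agent version. The tracing service name is
also taken from `self.app.name` instead of the charm metadata name. In outline (the
full change is in the diff below), a test opts in as:

    @pytest.mark.skip_juju_version("3.4")
    async def test_workload_tracing(...):
        ...
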
--- src/paas_charm/charm.py | 2 +- tests/integration/conftest.py | 19 +++++++++++++++++++ .../flask/test_workload_tracing.py | 16 ++-------------- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 0a275ed..c75988d 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -486,7 +486,7 @@ def _create_charm_state(self) -> CharmState: if self._tracing and self._tracing.is_ready(): tracing_relation_data = TempoParameters( endpoint=f'{self._tracing.get_endpoint(protocol="otlp_http")}', - service_name=self.framework.meta.name, + service_name=self.app.name, ) return CharmState.from_charm( config=config, diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 5e885f7..f4d7dfa 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -10,6 +10,7 @@ from juju.client.jujudata import FileJujuData from juju.juju import Juju from juju.model import Controller, Model +from ops import JujuVersion from pytest_operator.plugin import OpsTest logger = logging.getLogger(__name__) @@ -93,6 +94,17 @@ async def fixture_model(ops_test: OpsTest) -> Model: return ops_test.model +@pytest.fixture(autouse=True) +def skip_by_juju_version(request, model): + if request.node.get_closest_marker("skip_juju_version"): + current_version = JujuVersion( + f"{model.info.agent_version.major}.{model.info.agent_version.minor}.{model.info.agent_version.patch}" + ) + min_version = JujuVersion(request.node.get_closest_marker("skip_juju_version").args[0]) + if current_version < min_version: + pytest.skip("Juju version is too old") + + @pytest.fixture(scope="module", name="external_hostname") def external_hostname_fixture() -> str: """Return the external hostname for ingress-related tests.""" @@ -182,3 +194,10 @@ async def _run_action(application_name, action_name, **params): return action.results return _run_action + + +def pytest_configure(config): + config.addinivalue_line( + "markers", + "skip_juju_version(version): skip test if Juju version is lower than version", + ) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index 3c3296f..adfdc51 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -12,7 +12,6 @@ import pytest from juju.application import Application from juju.model import Model -from ops import JujuVersion from pytest_operator.plugin import OpsTest from tests.integration.helpers import get_traces_patiently @@ -20,11 +19,7 @@ logger = logging.getLogger(__name__) -def _tempo_available(): - return JujuVersion.from_environ() < JujuVersion("3.4") - - -@pytest.mark.skipif(_tempo_available(), reason="tempo needs juju>=3.4") +@pytest.mark.skip_juju_version("3.4") @pytest.mark.usefixtures("flask_tracing_app") @pytest.mark.usefixtures("tempo_app") async def test_workload_tracing( @@ -54,16 +49,9 @@ async def _fetch_page(session): async with session.get(f"http://{flask_unit_ip}:8000") as response: return await response.text() - async def _fetch_trace(session): - async with session.get( - f"http://{tempo_host}:3200/api/search?tags=service.name={flask_tracing_app.charm_name}" - ) as response: - text = await response.text() - return json.loads(text)["traces"] - async with aiohttp.ClientSession() as session: pages = [_fetch_page(session) for _ in range(5)] await asyncio.gather(*pages) # verify workload traces are ingested into Tempo - assert await get_traces_patiently(tempo_host, 
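
Skip at the model level instead of per test: the module-scoped model fixture now checks
the agent version of the current model and skips when it is older than 3.4, since Tempo
needs Juju >= 3.4. The `skip_juju_version` marker, its autouse fixture and the
`pytest_configure` registration are removed again. A rough sketch of the added check
(the `agent` variable is shorthand for the expression used in the diff below):

    agent = ops_test.model.info.agent_version
    if JujuVersion(f"{agent.major}.{agent.minor}.{agent.patch}") < JujuVersion("3.4"):
        pytest.skip("Juju version is too old for Tempo")
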
flask_tracing_app.charm_name) + assert await get_traces_patiently(tempo_host, flask_tracing_app.name) From 0e090cc54d48876a98c872339bab1804fe203ae4 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 12:24:09 +0300 Subject: [PATCH 38/70] chore(): Skip test at model level --- tests/integration/conftest.py | 25 ++++++------------- .../flask/test_workload_tracing.py | 1 - 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index f4d7dfa..334998b 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -91,20 +91,16 @@ async def get_unit_ips(application_name: str): async def fixture_model(ops_test: OpsTest) -> Model: """Return the current testing juju model.""" assert ops_test.model + # Check the juju version is at least 3.4 to use test workload tracing + current_version = JujuVersion( + f"{ops_test.model.info.agent_version.major}.{ops_test.model.info.agent_version.minor}.{ops_test.model.info.agent_version.patch}" + ) + min_version = JujuVersion("3.4") + if current_version < min_version: + pytest.skip("Juju version is too old for Tempo") return ops_test.model -@pytest.fixture(autouse=True) -def skip_by_juju_version(request, model): - if request.node.get_closest_marker("skip_juju_version"): - current_version = JujuVersion( - f"{model.info.agent_version.major}.{model.info.agent_version.minor}.{model.info.agent_version.patch}" - ) - min_version = JujuVersion(request.node.get_closest_marker("skip_juju_version").args[0]) - if current_version < min_version: - pytest.skip("Juju version is too old") - - @pytest.fixture(scope="module", name="external_hostname") def external_hostname_fixture() -> str: """Return the external hostname for ingress-related tests.""" @@ -194,10 +190,3 @@ async def _run_action(application_name, action_name, **params): return action.results return _run_action - - -def pytest_configure(config): - config.addinivalue_line( - "markers", - "skip_juju_version(version): skip test if Juju version is lower than version", - ) diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py index adfdc51..3ed3213 100644 --- a/tests/integration/flask/test_workload_tracing.py +++ b/tests/integration/flask/test_workload_tracing.py @@ -19,7 +19,6 @@ logger = logging.getLogger(__name__) -@pytest.mark.skip_juju_version("3.4") @pytest.mark.usefixtures("flask_tracing_app") @pytest.mark.usefixtures("tempo_app") async def test_workload_tracing( From 32fb470c720e2243a53813a06f359c9383ac1225 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Wed, 15 Jan 2025 13:00:23 +0300 Subject: [PATCH 39/70] Chore(): Change matrix to not include workload stuff for juju 3.3 --- .github/workflows/integration_test.yaml | 10 ++++++++-- tests/integration/conftest.py | 7 ------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 3769d46..09c9dbc 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -7,14 +7,20 @@ jobs: integration-tests: strategy: matrix: - juju-version: [ 3.3/stable, 3.4/stable, 3.5/stable ] + param: + - juju-version: 3.3/stable + modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py"]' + - juju-version: 3.4/stable + modules: 
'["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_workload_tracing.py"]' + - juju-version: 3.5/stable + modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_workload_tracing.py"]' uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit with: extra-arguments: -x --localstack-address 172.17.0.1 pre-run-script: localstack-installation.sh charmcraft-channel: latest/edge - modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_workload_tracing.py"]' + modules: ${{ matrix.param.modules }} rockcraft-channel: latest/edge juju-channel: ${{ matrix.juju-version }} channel: 1.29-strict/stable diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 334998b..ac6ff6c 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -91,13 +91,6 @@ async def get_unit_ips(application_name: str): async def fixture_model(ops_test: OpsTest) -> Model: """Return the current testing juju model.""" assert ops_test.model - # Check the juju version is at least 3.4 to use test workload tracing - current_version = JujuVersion( - f"{ops_test.model.info.agent_version.major}.{ops_test.model.info.agent_version.minor}.{ops_test.model.info.agent_version.patch}" - ) - min_version = JujuVersion("3.4") - if current_version < min_version: - pytest.skip("Juju version is too old for Tempo") return ops_test.model From 66fb938b01c262aed2d3d04ac6399462425242a1 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 16 Jan 2025 13:32:17 +0300 Subject: [PATCH 40/70] chore(test): Move all tracing tests into one module and parametrize frameworks. 
--- .github/workflows/integration_test.yaml | 10 +- .../django_tracing_app/urls.py | 2 +- .../lib/charms/traefik_k8s/v2/ingress.py | 27 +- tests/integration/django/conftest.py | 9 - tests/integration/fastapi/conftest.py | 9 - tests/integration/flask/conftest.py | 98 ------- tests/integration/flask/requirements.txt | 1 + .../flask/test_workload_tracing.py | 56 ---- tests/integration/go/conftest.py | 9 - tests/integration/integrations/conftest.py | 268 ++++++++++++++++++ .../integration/integrations/test_tracing.py | 90 ++++++ 11 files changed, 380 insertions(+), 199 deletions(-) delete mode 100644 tests/integration/flask/test_workload_tracing.py create mode 100644 tests/integration/integrations/conftest.py create mode 100644 tests/integration/integrations/test_tracing.py diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 09c9dbc..2997b8b 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -7,20 +7,14 @@ jobs: integration-tests: strategy: matrix: - param: - - juju-version: 3.3/stable - modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py"]' - - juju-version: 3.4/stable - modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_workload_tracing.py"]' - - juju-version: 3.5/stable - modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_workload_tracing.py"]' + juju-version: [ 3.3/stable, 3.4/stable, 3.5/stable ] uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit with: extra-arguments: -x --localstack-address 172.17.0.1 pre-run-script: localstack-installation.sh charmcraft-channel: latest/edge - modules: ${{ matrix.param.modules }} + modules: '["test_charm.py", "test_cos.py", "test_database.py", "test_db_migration.py", "test_django.py", "test_django_integrations.py", "test_fastapi.py", "test_go.py", "test_integrations.py", "test_proxy.py", "test_workers.py", "test_tracing.py"]' rockcraft-channel: latest/edge juju-channel: ${{ matrix.juju-version }} channel: 1.29-strict/stable diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py index 86e13e8..965bd40 100644 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py +++ b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py @@ -28,6 +28,6 @@ path("len/users", user_count, name="user_count"), path("environ", environ, name="environ"), path("sleep", sleep, name="sleep"), - path("hello", my_view, name="my_view"), + path("", my_view, name="my_view"), path("login", login, name="login"), ] diff --git a/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py b/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py index 582a31f..5fb2cae 100644 --- a/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py +++ b/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py @@ -56,13 +56,14 @@ def 
_on_ingress_revoked(self, event: IngressPerAppRevokedEvent): import socket import typing from dataclasses import dataclass +from functools import partial from typing import Any, Callable, Dict, List, MutableMapping, Optional, Sequence, Tuple, Union import pydantic from ops.charm import CharmBase, RelationBrokenEvent, RelationEvent from ops.framework import EventSource, Object, ObjectEvents, StoredState from ops.model import ModelError, Relation, Unit -from pydantic import AnyHttpUrl, BaseModel, Field, validator +from pydantic import AnyHttpUrl, BaseModel, Field # The unique Charmhub library identifier, never change it LIBID = "e6de2a5cd5b34422a204668f3b8f90d2" @@ -72,7 +73,7 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 13 +LIBPATCH = 14 PYDEPS = ["pydantic"] @@ -84,6 +85,9 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): PYDANTIC_IS_V1 = int(pydantic.version.VERSION.split(".")[0]) < 2 if PYDANTIC_IS_V1: + from pydantic import validator + + input_validator = partial(validator, pre=True) class DatabagModel(BaseModel): # type: ignore """Base databag model.""" @@ -143,7 +147,9 @@ def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): return databag else: - from pydantic import ConfigDict + from pydantic import ConfigDict, field_validator + + input_validator = partial(field_validator, mode="before") class DatabagModel(BaseModel): """Base databag model.""" @@ -171,7 +177,7 @@ def load(cls, databag: MutableMapping): k: json.loads(v) for k, v in databag.items() # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.__fields__.items()} # type: ignore + if k in {(f.alias or n) for n, f in cls.model_fields.items()} # type: ignore } except json.JSONDecodeError as e: msg = f"invalid databag contents: expecting json. 
{databag}" @@ -252,14 +258,14 @@ class IngressRequirerAppData(DatabagModel): default="http", description="What scheme to use in the generated ingress url" ) - @validator("scheme", pre=True) + @input_validator("scheme") def validate_scheme(cls, scheme): # noqa: N805 # pydantic wants 'cls' as first arg """Validate scheme arg.""" if scheme not in {"http", "https", "h2c"}: raise ValueError("invalid scheme: should be one of `http|https|h2c`") return scheme - @validator("port", pre=True) + @input_validator("port") def validate_port(cls, port): # noqa: N805 # pydantic wants 'cls' as first arg """Validate port.""" assert isinstance(port, int), type(port) @@ -277,13 +283,13 @@ class IngressRequirerUnitData(DatabagModel): "IP can only be None if the IP information can't be retrieved from juju.", ) - @validator("host", pre=True) + @input_validator("host") def validate_host(cls, host): # noqa: N805 # pydantic wants 'cls' as first arg """Validate host.""" assert isinstance(host, str), type(host) return host - @validator("ip", pre=True) + @input_validator("ip") def validate_ip(cls, ip): # noqa: N805 # pydantic wants 'cls' as first arg """Validate ip.""" if ip is None: @@ -462,7 +468,10 @@ def _handle_relation(self, event): event.relation, data.app.name, data.app.model, - [unit.dict() for unit in data.units], + [ + unit.dict() if PYDANTIC_IS_V1 else unit.model_dump(mode="json") + for unit in data.units + ], data.app.strip_prefix or False, data.app.redirect_https or False, ) diff --git a/tests/integration/django/conftest.py b/tests/integration/django/conftest.py index 343fb2c..4bb9599 100644 --- a/tests/integration/django/conftest.py +++ b/tests/integration/django/conftest.py @@ -23,15 +23,6 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/django/charm") -@pytest.fixture(scope="module", name="django_tracing_app_image") -def fixture_django_tracing_app_image(pytestconfig: Config): - """Return the --django-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--django-tracing-app-image") - if not image: - raise ValueError("the following arguments are required: --django-tracing-app-image") - return image - - @pytest.fixture(scope="module", name="django_app_image") def fixture_django_app_image(pytestconfig: Config): """Return the --django-app-image test parameter.""" diff --git a/tests/integration/fastapi/conftest.py b/tests/integration/fastapi/conftest.py index c27523d..e0e56dc 100644 --- a/tests/integration/fastapi/conftest.py +++ b/tests/integration/fastapi/conftest.py @@ -22,15 +22,6 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/fastapi/charm") -@pytest.fixture(scope="module", name="fastapi_tracing_app_image") -def fixture_fastapi_tracing_app_image(pytestconfig: Config): - """Return the --fastapi-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--fastapi-tracing-app-image") - if not image: - raise ValueError("the following arguments are required: --fastapi-tracing-app-image") - return image - - @pytest.fixture(scope="module", name="fastapi_app_image") def fixture_fastapi_app_image(pytestconfig: Config): """Return the --fastapi-app-image test parameter.""" diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index effb079..d8f558d 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -27,15 +27,6 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/flask") -@pytest.fixture(scope="module", name="test_tracing_flask_image") -def fixture_test_tracing_flask_image(pytestconfig: 
Config): - """Return the --test-flask-tracing-image test parameter.""" - test_flask_image = pytestconfig.getoption("--test-tracing-flask-image") - if not test_flask_image: - raise ValueError("the following arguments are required: --test-tracing-flask-image") - return test_flask_image - - @pytest.fixture(scope="module", name="test_async_flask_image") def fixture_test_async_flask_image(pytestconfig: Config): """Return the --test-async-flask-image test parameter.""" @@ -145,95 +136,6 @@ async def flask_async_app_fixture(build_charm: str, model: Model, test_async_fla return app -@pytest_asyncio.fixture(scope="module", name="flask_tracing_app") -async def flask_tracing_app_fixture(build_charm: str, model: Model, test_tracing_flask_image: str): - """Build and deploy the flask charm with test-tracing-flask image.""" - app_name = "flask-tracing-k8s" - - resources = { - "flask-app-image": test_tracing_flask_image, - } - app = await model.deploy( - build_charm, resources=resources, application_name=app_name, series="jammy" - ) - await model.wait_for_idle(raise_on_blocked=True) - return app - - -async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: - """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" - config = { - "access-key": "accesskey", - "secret-key": "secretkey", - } - minio_app = await ops_test.model.deploy("minio", channel="edge", trust=True, config=config) - await ops_test.model.wait_for_idle( - apps=[minio_app.name], status="active", timeout=2000, idle_period=45 - ) - minio_addr = (await get_unit_ips(minio_app.name))[0] - - mc_client = Minio( - f"{minio_addr}:9000", - access_key="accesskey", - secret_key="secretkey", - secure=False, - ) - - # create tempo bucket - found = mc_client.bucket_exists("tempo") - if not found: - mc_client.make_bucket("tempo") - - # configure s3-integrator - s3_integrator_app: Application = ops_test.model.applications["s3-integrator"] - s3_integrator_leader: Unit = s3_integrator_app.units[0] - - await s3_integrator_app.set_config( - { - "endpoint": f"minio-0.minio-endpoints.{ops_test.model.name}.svc.cluster.local:9000", - "bucket": "tempo", - } - ) - - action = await s3_integrator_leader.run_action("sync-s3-credentials", **config) - action_result = await action.wait() - assert action_result.status == "completed" - - -@pytest_asyncio.fixture(scope="module", name="tempo_app") -async def deploy_tempo_cluster(ops_test: OpsTest, get_unit_ips): - """Deploys tempo in its HA version together with minio and s3-integrator.""" - tempo_app = "tempo" - worker_app = "tempo-worker" - tempo_worker_charm_url, worker_channel = "tempo-worker-k8s", "edge" - tempo_coordinator_charm_url, coordinator_channel = "tempo-coordinator-k8s", "edge" - await ops_test.model.deploy( - tempo_worker_charm_url, application_name=worker_app, channel=worker_channel, trust=True - ) - app = await ops_test.model.deploy( - tempo_coordinator_charm_url, - application_name=tempo_app, - channel=coordinator_channel, - trust=True, - ) - await ops_test.model.deploy("s3-integrator", channel="edge") - - await ops_test.model.integrate(tempo_app + ":s3", "s3-integrator" + ":s3-credentials") - await ops_test.model.integrate(tempo_app + ":tempo-cluster", worker_app + ":tempo-cluster") - - await deploy_and_configure_minio(ops_test, get_unit_ips) - async with ops_test.fast_forward(): - await ops_test.model.wait_for_idle( - apps=[tempo_app, worker_app, "s3-integrator"], - status="active", - timeout=2000, - idle_period=30, - # TODO: remove when 
https://github.com/canonical/tempo-coordinator-k8s-operator/issues/90 is fixed - raise_on_error=False, - ) - return app - - @pytest_asyncio.fixture(scope="module", name="traefik_app") async def deploy_traefik_fixture( model: Model, diff --git a/tests/integration/flask/requirements.txt b/tests/integration/flask/requirements.txt index 099fdcd..a481947 100644 --- a/tests/integration/flask/requirements.txt +++ b/tests/integration/flask/requirements.txt @@ -2,3 +2,4 @@ ops >= 1.5.0 pytest-operator >= 0.32.0 aiohttp == 3.11.7 tenacity +nest_asyncio diff --git a/tests/integration/flask/test_workload_tracing.py b/tests/integration/flask/test_workload_tracing.py deleted file mode 100644 index 3ed3213..0000000 --- a/tests/integration/flask/test_workload_tracing.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -"""Integration tests for Flask workers and schedulers.""" - -import asyncio -import json -import logging -import time - -import aiohttp -import pytest -from juju.application import Application -from juju.model import Model -from pytest_operator.plugin import OpsTest - -from tests.integration.helpers import get_traces_patiently - -logger = logging.getLogger(__name__) - - -@pytest.mark.usefixtures("flask_tracing_app") -@pytest.mark.usefixtures("tempo_app") -async def test_workload_tracing( - ops_test: OpsTest, - model: Model, - flask_tracing_app: Application, - tempo_app: Application, - get_unit_ips, -): - """ - arrange: Flask is deployed with async enabled rock. Change gunicorn worker class. - act: Do 15 requests that would take 2 seconds each. - assert: All 15 requests should be served in under 3 seconds. - """ - await ops_test.model.integrate( - f"{flask_tracing_app.name}:tracing", f"{tempo_app.name}:tracing" - ) - - await ops_test.model.wait_for_idle( - apps=[flask_tracing_app.name, tempo_app.name], status="active", timeout=300 - ) - # the flask unit is not important. Take the first one - flask_unit_ip = (await get_unit_ips(flask_tracing_app.name))[0] - tempo_host = (await get_unit_ips(tempo_app.name))[0] - - async def _fetch_page(session): - async with session.get(f"http://{flask_unit_ip}:8000") as response: - return await response.text() - - async with aiohttp.ClientSession() as session: - pages = [_fetch_page(session) for _ in range(5)] - await asyncio.gather(*pages) - - # verify workload traces are ingested into Tempo - assert await get_traces_patiently(tempo_host, flask_tracing_app.name) diff --git a/tests/integration/go/conftest.py b/tests/integration/go/conftest.py index e564046..bbfe047 100644 --- a/tests/integration/go/conftest.py +++ b/tests/integration/go/conftest.py @@ -22,15 +22,6 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/go/charm") -@pytest.fixture(scope="module", name="go_tracing_app_image") -def fixture_go_tracing_app_image(pytestconfig: Config): - """Return the --go-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--go-tracing-app-image") - if not image: - raise ValueError("the following arguments are required: --go-tracing-app-image") - return image - - @pytest.fixture(scope="module", name="go_app_image") def fixture_go_app_image(pytestconfig: Config): """Return the --go-app-image test parameter.""" diff --git a/tests/integration/integrations/conftest.py b/tests/integration/integrations/conftest.py new file mode 100644 index 0000000..7130d6a --- /dev/null +++ b/tests/integration/integrations/conftest.py @@ -0,0 +1,268 @@ +# Copyright 2025 Canonical Ltd. 
+# See LICENSE file for licensing details. + +"""Fixtures for flask charm integration tests.""" + +import os +import pathlib +from secrets import token_hex + +import boto3 +import pytest +import pytest_asyncio +from botocore.config import Config as BotoConfig +from juju.application import Application +from juju.model import Model +from minio import Minio +from ops import JujuVersion +from pytest import Config, FixtureRequest +from pytest_operator.plugin import OpsTest + +from tests.integration.helpers import inject_charm_config, inject_venv + +PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent.parent + +import nest_asyncio + +nest_asyncio.apply() + + +@pytest.fixture(autouse=True) +def skip_by_juju_version(request, model): + if request.node.get_closest_marker("skip_juju_version"): + current_version = JujuVersion( + f"{model.info.agent_version.major}.{model.info.agent_version.minor}.{model.info.agent_version.patch}" + ) + min_version = JujuVersion(request.node.get_closest_marker("skip_juju_version").args[0]) + if current_version < min_version: + pytest.skip("Juju version is too old") + + +def pytest_configure(config): + config.addinivalue_line( + "markers", + "skip_juju_version(version): skip test if Juju version is lower than version", + ) + + +@pytest.fixture(autouse=True) +def cwd(): + return os.chdir(PROJECT_ROOT / "examples/flask") + + +@pytest.fixture(scope="module", name="test_tracing_flask_image") +def fixture_test_tracing_flask_image(pytestconfig: Config): + """Return the --test-flask-tracing-image test parameter.""" + test_flask_image = pytestconfig.getoption("--test-tracing-flask-image") + if not test_flask_image: + raise ValueError("the following arguments are required: --test-tracing-flask-image") + return test_flask_image + + +@pytest.fixture(scope="module", name="django_tracing_app_image") +def fixture_django_tracing_app_image(pytestconfig: Config): + """Return the --django-tracing-app-image test parameter.""" + image = pytestconfig.getoption("--django-tracing-app-image") + if not image: + raise ValueError("the following arguments are required: --django-tracing-app-image") + return image + + +@pytest.fixture(scope="module", name="fastapi_tracing_app_image") +def fixture_fastapi_tracing_app_image(pytestconfig: Config): + """Return the --fastapi-tracing-app-image test parameter.""" + image = pytestconfig.getoption("--fastapi-tracing-app-image") + if not image: + raise ValueError("the following arguments are required: --fastapi-tracing-app-image") + return image + + +@pytest.fixture(scope="module", name="go_tracing_app_image") +def fixture_go_tracing_app_image(pytestconfig: Config): + """Return the --go-tracing-app-image test parameter.""" + image = pytestconfig.getoption("--go-tracing-app-image") + if not image: + raise ValueError("the following arguments are required: --go-tracing-app-image") + return image + + +async def build_charm_file( + pytestconfig: pytest.Config, ops_test: OpsTest, tmp_path_factory, framework +) -> str: + """Get the existing charm file.""" + charm_file = next( + (f for f in pytestconfig.getoption("--charm-file") if f"/{framework}-k8s" in f), None + ) + + if not charm_file: + charm_location = PROJECT_ROOT / f"examples/{framework}/charm" + if framework == "flask": + charm_location = PROJECT_ROOT / f"examples/{framework}" + charm_file = await ops_test.build_charm(charm_location) + elif charm_file[0] != "/": + charm_file = PROJECT_ROOT / charm_file + inject_venv(charm_file, PROJECT_ROOT / "src" / "paas_charm") + return 
pathlib.Path(charm_file).absolute() + + +@pytest_asyncio.fixture(scope="module", name="flask_tracing_app") +async def flask_tracing_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + test_tracing_flask_image: str, +): + """Build and deploy the flask charm with test-tracing-flask image.""" + app_name = "flask-tracing-k8s" + + resources = { + "flask-app-image": test_tracing_flask_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "flask") + app = await model.deploy( + charm_file, resources=resources, application_name=app_name, series="jammy" + ) + await model.wait_for_idle(raise_on_blocked=True) + return app + + +@pytest_asyncio.fixture(scope="module", name="django_tracing_app") +async def django_tracing_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + django_tracing_app_image: str, +): + """Build and deploy the Django charm with django-tracing-app image.""" + app_name = "django-tracing-k8s" + + resources = { + "django-app-image": django_tracing_app_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "django") + + app = await model.deploy( + charm_file, + resources=resources, + config={"django-allowed-hosts": "*"}, + application_name=app_name, + series="jammy", + ) + # await model.wait_for_idle(raise_on_blocked=True) + return app + + +@pytest_asyncio.fixture(scope="module", name="fastapi_tracing_app") +async def fastapi_tracing_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + fastapi_tracing_app_image: str, +): + """Build and deploy the FastAPI charm with fastapi-tracing-app image.""" + app_name = "fastapi-tracing-k8s" + + resources = { + "app-image": fastapi_tracing_app_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "fastapi") + app = await model.deploy(charm_file, resources=resources, application_name=app_name) + # await model.wait_for_idle(raise_on_blocked=True) + return app + + +@pytest_asyncio.fixture(scope="module", name="go_tracing_app") +async def go_tracing_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + go_tracing_app_image: str, +): + """Build and deploy the Go charm with go-tracing-app image.""" + app_name = "go-tracing-k8s" + + resources = { + "app-image": go_tracing_app_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "go") + app = await model.deploy(charm_file, resources=resources, application_name=app_name) + # await model.wait_for_idle(raise_on_blocked=True) + return app + + +async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: + """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" + config = { + "access-key": "accesskey", + "secret-key": "secretkey", + } + minio_app = await ops_test.model.deploy("minio", channel="edge", trust=True, config=config) + await ops_test.model.wait_for_idle( + apps=[minio_app.name], status="active", timeout=2000, idle_period=45 + ) + minio_addr = (await get_unit_ips(minio_app.name))[0] + + mc_client = Minio( + f"{minio_addr}:9000", + access_key="accesskey", + secret_key="secretkey", + secure=False, + ) + + # create tempo bucket + found = mc_client.bucket_exists("tempo") + if not found: + mc_client.make_bucket("tempo") + + # configure s3-integrator + s3_integrator_app: Application = 
ops_test.model.applications["s3-integrator"] + s3_integrator_leader: Unit = s3_integrator_app.units[0] + + await s3_integrator_app.set_config( + { + "endpoint": f"minio-0.minio-endpoints.{ops_test.model.name}.svc.cluster.local:9000", + "bucket": "tempo", + } + ) + + action = await s3_integrator_leader.run_action("sync-s3-credentials", **config) + action_result = await action.wait() + assert action_result.status == "completed" + + +@pytest_asyncio.fixture(scope="module", name="tempo_app") +async def deploy_tempo_cluster(ops_test: OpsTest, get_unit_ips): + """Deploys tempo in its HA version together with minio and s3-integrator.""" + tempo_app = "tempo" + worker_app = "tempo-worker" + tempo_worker_charm_url, worker_channel = "tempo-worker-k8s", "edge" + tempo_coordinator_charm_url, coordinator_channel = "tempo-coordinator-k8s", "edge" + await ops_test.model.deploy( + tempo_worker_charm_url, application_name=worker_app, channel=worker_channel, trust=True + ) + app = await ops_test.model.deploy( + tempo_coordinator_charm_url, + application_name=tempo_app, + channel=coordinator_channel, + trust=True, + ) + await ops_test.model.deploy("s3-integrator", channel="edge") + + await ops_test.model.integrate(tempo_app + ":s3", "s3-integrator" + ":s3-credentials") + await ops_test.model.integrate(tempo_app + ":tempo-cluster", worker_app + ":tempo-cluster") + + await deploy_and_configure_minio(ops_test, get_unit_ips) + async with ops_test.fast_forward(): + await ops_test.model.wait_for_idle( + apps=[tempo_app, worker_app, "s3-integrator"], + status="active", + timeout=2000, + idle_period=30, + # TODO: remove when https://github.com/canonical/tempo-coordinator-k8s-operator/issues/90 is fixed + raise_on_error=False, + ) + return app diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py new file mode 100644 index 0000000..dc478c2 --- /dev/null +++ b/tests/integration/integrations/test_tracing.py @@ -0,0 +1,90 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Integration tests for Flask workers and schedulers.""" + +import asyncio +import json +import logging +import time + +import aiohttp +import pytest +from juju.application import Application +from juju.model import Model +from pytest_operator.plugin import OpsTest + +from tests.integration.helpers import get_traces_patiently + +logger = logging.getLogger(__name__) + + +@pytest.mark.parametrize( + "tracing_app, port", + [ + ("flask_tracing_app", 8000), + ("django_tracing_app", 8000), + ("fastapi_tracing_app", 8080), + ("go_tracing_app", 8080), + ], +) +@pytest.mark.skip_juju_version("3.4") # Tempo only supports Juju>=3.4 +async def test_workload_tracing( + ops_test: OpsTest, + model: Model, + tracing_app: Application, + port: int, + request: pytest.FixtureRequest, + get_unit_ips, +): + """ + arrange: Flask is deployed with async enabled rock. Change gunicorn worker class. + act: Do 15 requests that would take 2 seconds each. + assert: All 15 requests should be served in under 3 seconds. 
+ """ + + try: + tempo_app = await request.getfixturevalue("tempo_app") + except Exception as e: + print(f"Tempo is already deployed {e}") + + print(f"Tempo is ready") + tracing_app = request.getfixturevalue(tracing_app) + idle_list = [tracing_app.name] + + if tracing_app != "flask_tracing_app": + print("Deploying postgres") + try: + postgresql_app = request.getfixturevalue("postgresql_k8s") + except Exception as e: + print(f"Postgres is already deployed {e}") + print(f"Postgres is integrating with {tracing_app} ---- name: {tracing_app.name}") + await model.integrate(tracing_app.name, "postgresql-k8s") + idle_list.append("postgresql-k8s") + print(f"{idle_list=}") + await model.wait_for_idle(apps=idle_list, status="active", timeout=300) + print("ALL IDLE") + + tempo_app_name = "tempo" + + print(f"integrating {tracing_app.name}:tracing - {tempo_app_name}:tracing") + + await ops_test.model.integrate(f"{tracing_app.name}:tracing", f"{tempo_app_name}:tracing") + + await ops_test.model.wait_for_idle( + apps=[tracing_app.name, tempo_app_name], status="active", timeout=600 + ) + # the flask unit is not important. Take the first one + unit_ip = (await get_unit_ips(tracing_app.name))[0] + tempo_host = (await get_unit_ips(tempo_app_name))[0] + + async def _fetch_page(session): + async with session.get(f"http://{unit_ip}:{port}") as response: + return await response.text() + + async with aiohttp.ClientSession() as session: + pages = [_fetch_page(session) for _ in range(5)] + await asyncio.gather(*pages) + + # verify workload traces are ingested into Tempo + assert await get_traces_patiently(tempo_host, tracing_app.name) From e1624fe478f40a0c0b7056cd980b181632663912 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 16 Jan 2025 13:49:13 +0300 Subject: [PATCH 41/70] Chore(): Cleanup the code and update docstrings. --- src/paas_charm/charm_state.py | 2 +- tests/integration/conftest.py | 1 - tests/integration/flask/conftest.py | 1 - tests/integration/integrations/conftest.py | 4 +++- tests/integration/integrations/test_tracing.py | 17 +++++------------ tests/integration/{flask => }/requirements.txt | 5 +++-- tests/unit/flask/test_tracing.py | 1 - tox.ini | 5 ++--- 8 files changed, 14 insertions(+), 22 deletions(-) rename tests/integration/{flask => }/requirements.txt (50%) diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 0ea2622..6c17fea 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -21,7 +21,7 @@ class TempoParameters(BaseModel): - """Configuration for accessing S3 bucket. + """Configuration for accessing Tempo service. Attributes: endpoint: Tempo endpoint URL to send the traces. 
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index ac6ff6c..5e885f7 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -10,7 +10,6 @@
 from juju.client.jujudata import FileJujuData
 from juju.juju import Juju
 from juju.model import Controller, Model
-from ops import JujuVersion
 from pytest_operator.plugin import OpsTest

 logger = logging.getLogger(__name__)
diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py
index d8f558d..9d56d29 100644
--- a/tests/integration/flask/conftest.py
+++ b/tests/integration/flask/conftest.py
@@ -13,7 +13,6 @@
 from botocore.config import Config as BotoConfig
 from juju.application import Application
 from juju.model import Model
-from minio import Minio
 from pytest import Config, FixtureRequest
 from pytest_operator.plugin import OpsTest

diff --git a/tests/integration/integrations/conftest.py b/tests/integration/integrations/conftest.py
index 7130d6a..865bdb3 100644
--- a/tests/integration/integrations/conftest.py
+++ b/tests/integration/integrations/conftest.py
@@ -29,6 +29,7 @@

 @pytest.fixture(autouse=True)
 def skip_by_juju_version(request, model):
+    """Skip the test if juju version is lower than the `skip_juju_version` marker value."""
     if request.node.get_closest_marker("skip_juju_version"):
         current_version = JujuVersion(
             f"{model.info.agent_version.major}.{model.info.agent_version.minor}.{model.info.agent_version.patch}"
@@ -39,6 +40,7 @@ def skip_by_juju_version(request, model):


 def pytest_configure(config):
+    """Add new marker."""
     config.addinivalue_line(
         "markers",
         "skip_juju_version(version): skip test if Juju version is lower than version",
@@ -89,7 +91,7 @@ def fixture_go_tracing_app_image(pytestconfig: Config):
 async def build_charm_file(
     pytestconfig: pytest.Config, ops_test: OpsTest, tmp_path_factory, framework
 ) -> str:
-    """Get the existing charm file."""
+    """Get the existing charm file if it exists, otherwise build a new one."""
     charm_file = next(
         (f for f in pytestconfig.getoption("--charm-file") if f"/{framework}-k8s" in f), None
     )
diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py
index dc478c2..086c49f 100644
--- a/tests/integration/integrations/test_tracing.py
+++ b/tests/integration/integrations/test_tracing.py
@@ -38,37 +38,30 @@ async def test_workload_tracing(
     get_unit_ips,
 ):
     """
-    arrange: Flask is deployed with async enabled rock. Change gunicorn worker class.
-    act: Do 15 requests that would take 2 seconds each.
-    assert: All 15 requests should be served in under 3 seconds.
+    arrange: Deploy Tempo cluster, app to test and postgres if required.
+    act: Send 5 requests to the app.
+    assert: Tempo should have tracing info about the app.
""" try: tempo_app = await request.getfixturevalue("tempo_app") except Exception as e: - print(f"Tempo is already deployed {e}") + logger.info(f"Tempo is already deployed {e}") - print(f"Tempo is ready") tracing_app = request.getfixturevalue(tracing_app) idle_list = [tracing_app.name] if tracing_app != "flask_tracing_app": - print("Deploying postgres") try: postgresql_app = request.getfixturevalue("postgresql_k8s") except Exception as e: - print(f"Postgres is already deployed {e}") - print(f"Postgres is integrating with {tracing_app} ---- name: {tracing_app.name}") + logger.info(f"Postgres is already deployed {e}") await model.integrate(tracing_app.name, "postgresql-k8s") idle_list.append("postgresql-k8s") - print(f"{idle_list=}") await model.wait_for_idle(apps=idle_list, status="active", timeout=300) - print("ALL IDLE") tempo_app_name = "tempo" - print(f"integrating {tracing_app.name}:tracing - {tempo_app_name}:tracing") - await ops_test.model.integrate(f"{tracing_app.name}:tracing", f"{tempo_app_name}:tracing") await ops_test.model.wait_for_idle( diff --git a/tests/integration/flask/requirements.txt b/tests/integration/requirements.txt similarity index 50% rename from tests/integration/flask/requirements.txt rename to tests/integration/requirements.txt index a481947..656c48e 100644 --- a/tests/integration/flask/requirements.txt +++ b/tests/integration/requirements.txt @@ -1,5 +1,6 @@ ops >= 1.5.0 pytest-operator >= 0.32.0 aiohttp == 3.11.7 -tenacity -nest_asyncio +tenacity == 9.0.0 +nest_asyncio == 1.6.0 +minio == 7.2.14 diff --git a/tests/unit/flask/test_tracing.py b/tests/unit/flask/test_tracing.py index ba4dcb9..126db90 100644 --- a/tests/unit/flask/test_tracing.py +++ b/tests/unit/flask/test_tracing.py @@ -34,6 +34,5 @@ def test_tracing_relation(harness: Harness): assert harness.model.unit.status == ops.ActiveStatus() service_env = container.get_plan().services["flask"].environment - print(service_env) assert service_env["OTEL_EXPORTER_OTLP_ENDPOINT"] == "http://test-ip:4318" assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s" diff --git a/tox.ini b/tox.ini index 1e4fea3..c51bc2d 100644 --- a/tox.ini +++ b/tox.ini @@ -55,7 +55,7 @@ deps = types-requests types-PyYAML -r{toxinidir}/requirements.txt - -r{toxinidir}/tests/integration/flask/requirements.txt + -r{toxinidir}/tests/integration/requirements.txt commands = codespell {toxinidir} \ --skip {toxinidir}/.git \ @@ -111,9 +111,8 @@ deps = boto3 juju==3.5.2.1 git+https://github.com/canonical/saml-test-idp.git - minio -r{toxinidir}/requirements.txt - -r{toxinidir}/tests/integration/flask/requirements.txt + -r{toxinidir}/tests/integration/requirements.txt commands = pytest -v --tb native --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs} From 2650b7b83183c2fd198a2a6c944e8e9dad110b78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ali=20U=C4=9EUR?= <39213991+alithethird@users.noreply.github.com> Date: Thu, 16 Jan 2025 13:50:06 +0300 Subject: [PATCH 42/70] chore(): Update app.py Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- app.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app.py b/app.py index c969e86..eb52333 100644 --- a/app.py +++ b/app.py @@ -1,3 +1,6 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. 
+ from flask import Flask import time From ebcccf4e000e243ddb53f320c7562116a52ca432 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 16 Jan 2025 14:51:31 +0300 Subject: [PATCH 43/70] chore():Address comments --- tests/integration/integrations/conftest.py | 3 --- tests/integration/integrations/test_tracing.py | 6 +++--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/integration/integrations/conftest.py b/tests/integration/integrations/conftest.py index 865bdb3..ea3242a 100644 --- a/tests/integration/integrations/conftest.py +++ b/tests/integration/integrations/conftest.py @@ -152,7 +152,6 @@ async def django_tracing_app_fixture( application_name=app_name, series="jammy", ) - # await model.wait_for_idle(raise_on_blocked=True) return app @@ -172,7 +171,6 @@ async def fastapi_tracing_app_fixture( } charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "fastapi") app = await model.deploy(charm_file, resources=resources, application_name=app_name) - # await model.wait_for_idle(raise_on_blocked=True) return app @@ -192,7 +190,6 @@ async def go_tracing_app_fixture( } charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "go") app = await model.deploy(charm_file, resources=resources, application_name=app_name) - # await model.wait_for_idle(raise_on_blocked=True) return app diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py index 086c49f..ae42650 100644 --- a/tests/integration/integrations/test_tracing.py +++ b/tests/integration/integrations/test_tracing.py @@ -27,6 +27,7 @@ ("fastapi_tracing_app", 8080), ("go_tracing_app", 8080), ], + indirect["tracing_app"], ) @pytest.mark.skip_juju_version("3.4") # Tempo only supports Juju>=3.4 async def test_workload_tracing( @@ -48,10 +49,9 @@ async def test_workload_tracing( except Exception as e: logger.info(f"Tempo is already deployed {e}") - tracing_app = request.getfixturevalue(tracing_app) idle_list = [tracing_app.name] - if tracing_app != "flask_tracing_app": + if tracing_app.name != "flask-tracing-k8s": try: postgresql_app = request.getfixturevalue("postgresql_k8s") except Exception as e: @@ -67,7 +67,7 @@ async def test_workload_tracing( await ops_test.model.wait_for_idle( apps=[tracing_app.name, tempo_app_name], status="active", timeout=600 ) - # the flask unit is not important. 
Take the first one + unit_ip = (await get_unit_ips(tracing_app.name))[0] tempo_host = (await get_unit_ips(tempo_app_name))[0] From cdb1902772e942260a2f389be2c1047484550d85 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Thu, 16 Jan 2025 22:48:25 +0300 Subject: [PATCH 44/70] Chore(): Fix --- tests/integration/integrations/test_tracing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py index ae42650..9db8ae9 100644 --- a/tests/integration/integrations/test_tracing.py +++ b/tests/integration/integrations/test_tracing.py @@ -27,7 +27,7 @@ ("fastapi_tracing_app", 8080), ("go_tracing_app", 8080), ], - indirect["tracing_app"], + indirect=["tracing_app"], ) @pytest.mark.skip_juju_version("3.4") # Tempo only supports Juju>=3.4 async def test_workload_tracing( From f7e96450678cf231f52113a20cc571e499b6fa6c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 17 Jan 2025 07:01:50 +0300 Subject: [PATCH 45/70] chore(test): Improve tests --- .trivyignore | 6 +-- .../integration/integrations/test_tracing.py | 6 +-- tests/unit/flask/constants.py | 27 +++++++++++++ tests/unit/flask/test_tracing.py | 19 +++++++++ tests/unit/flask/test_webserver.py | 39 +++++++++++++++++-- 5 files changed, 85 insertions(+), 12 deletions(-) diff --git a/.trivyignore b/.trivyignore index 6c2a020..b88a77b 100644 --- a/.trivyignore +++ b/.trivyignore @@ -1,10 +1,6 @@ -# ignore CVE introduced by python3-gunicorn -CVE-2022-40897 # pypa/setuptools: Remote code execution via download CVE-2024-6345 # pebble: Calling Decoder.Decode on a message which contains deeply nested structures can cause a panic due to stack exhaustion CVE-2024-34156 -# pebble: Go stdlib -CVE-2024-45338 # go-app: Go crypto lib -CVE-2024-45337 +CVE-2024-45337 diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py index 9db8ae9..d912085 100644 --- a/tests/integration/integrations/test_tracing.py +++ b/tests/integration/integrations/test_tracing.py @@ -20,20 +20,19 @@ @pytest.mark.parametrize( - "tracing_app, port", + "tracing_app_fixture, port", [ ("flask_tracing_app", 8000), ("django_tracing_app", 8000), ("fastapi_tracing_app", 8080), ("go_tracing_app", 8080), ], - indirect=["tracing_app"], ) @pytest.mark.skip_juju_version("3.4") # Tempo only supports Juju>=3.4 async def test_workload_tracing( ops_test: OpsTest, model: Model, - tracing_app: Application, + tracing_app_fixture: str, port: int, request: pytest.FixtureRequest, get_unit_ips, @@ -49,6 +48,7 @@ async def test_workload_tracing( except Exception as e: logger.info(f"Tempo is already deployed {e}") + tracing_app = request.getfixturevalue(tracing_app_fixture) idle_list = [tracing_app.name] if tracing_app.name != "flask-tracing-k8s": diff --git a/tests/unit/flask/constants.py b/tests/unit/flask/constants.py index 285ea32..af6712c 100644 --- a/tests/unit/flask/constants.py +++ b/tests/unit/flask/constants.py @@ -26,6 +26,33 @@ } } +LAYER_WITH_TRACING = { + "services": { + "flask": { + "override": "replace", + "startup": "enabled", + "command": f"/bin/python3 -m gunicorn -c /flask/gunicorn.conf.py app:app -k sync", + "after": ["statsd-exporter"], + "user": "_daemon_", + "environment": { + "OTEL_EXPORTER_OTLP_ENDPOINT": "http://test-ip:4318", + "OTEL_SERVICE_NAME": "flask-k8s", + }, + }, + "statsd-exporter": { + "override": "merge", + "command": ( + "/bin/statsd_exporter --statsd.mapping-config=/statsd-mapping.conf " + 
"--statsd.listen-udp=localhost:9125 " + "--statsd.listen-tcp=localhost:9125" + ), + "summary": "statsd exporter service", + "startup": "enabled", + "user": "_daemon_", + }, + } +} + LAYER_WITH_WORKER = { "services": { "flask": { diff --git a/tests/unit/flask/test_tracing.py b/tests/unit/flask/test_tracing.py index 126db90..ecc6976 100644 --- a/tests/unit/flask/test_tracing.py +++ b/tests/unit/flask/test_tracing.py @@ -36,3 +36,22 @@ def test_tracing_relation(harness: Harness): service_env = container.get_plan().services["flask"].environment assert service_env["OTEL_EXPORTER_OTLP_ENDPOINT"] == "http://test-ip:4318" assert service_env["OTEL_SERVICE_NAME"] == "flask-k8s" + + +def test_tracing_not_activated(harness: Harness): + """ + arrange: Deploy the charm without a relation to the Tempo charm. + act: Run all initial hooks. + assert: The flask service should not have the environment variables OTEL_EXPORTER_OTLP_ENDPOINT, OTEL_SERVICE_NAME. + """ + harness.set_model_name("flask-model") + + container = harness.model.unit.get_container(FLASK_CONTAINER_NAME) + container.add_layer("a_layer", DEFAULT_LAYER) + + harness.begin_with_initial_hooks() + + assert harness.model.unit.status == ops.ActiveStatus() + service_env = container.get_plan().services["flask"].environment + assert service_env.get("OTEL_EXPORTER_OTLP_ENDPOINT", None) is None + assert service_env.get("OTEL_SERVICE_NAME", None) is None diff --git a/tests/unit/flask/test_webserver.py b/tests/unit/flask/test_webserver.py index 3117a7a..5652484 100644 --- a/tests/unit/flask/test_webserver.py +++ b/tests/unit/flask/test_webserver.py @@ -19,11 +19,12 @@ from paas_charm.charm_state import CharmState from paas_charm.utils import enable_pebble_log_forwarding -from .constants import DEFAULT_LAYER, FLASK_CONTAINER_NAME +from .constants import DEFAULT_LAYER, FLASK_CONTAINER_NAME, LAYER_WITH_TRACING GUNICORN_CONFIG_TEST_PARAMS = [ pytest.param( {"workers": 10}, + DEFAULT_LAYER, textwrap.dedent( f"""\ bind = ['0.0.0.0:8000'] @@ -37,6 +38,7 @@ ), pytest.param( {"threads": 2, "timeout": 3, "keepalive": 4}, + DEFAULT_LAYER, textwrap.dedent( f"""\ bind = ['0.0.0.0:8000'] @@ -50,12 +52,41 @@ ), id="threads=2,timeout=3,keepalive=4", ), + pytest.param( + {}, + LAYER_WITH_TRACING, + textwrap.dedent( + f"""\ + bind = ['0.0.0.0:8000'] + chdir = '/flask/app' + accesslog = '/var/log/flask/access.log' + errorlog = '/var/log/flask/error.log' + statsd_host = 'localhost:9125' + from opentelemetry import trace + from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( + OTLPSpanExporter, + ) + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import BatchSpanProcessor + + def post_fork(server, worker): + trace.set_tracer_provider(TracerProvider()) + span_processor = BatchSpanProcessor(OTLPSpanExporter()) + trace.get_tracer_provider().add_span_processor(span_processor) + """ + ), + id="with-tracing", + ), ] -@pytest.mark.parametrize("charm_state_params, config_file", GUNICORN_CONFIG_TEST_PARAMS) +@pytest.mark.parametrize("charm_state_params, layer, config_file", GUNICORN_CONFIG_TEST_PARAMS) def test_gunicorn_config( - harness: Harness, charm_state_params, config_file, database_migration_mock + harness: Harness, + charm_state_params, + layer, + config_file, + database_migration_mock, ) -> None: """ arrange: create the Gunicorn webserver object with a controlled charm state generated by the @@ -66,7 +97,7 @@ def test_gunicorn_config( harness.begin() container: ops.Container = 
harness.model.unit.get_container(FLASK_CONTAINER_NAME) harness.set_can_connect(FLASK_CONTAINER_NAME, True) - container.add_layer("default", DEFAULT_LAYER) + container.add_layer("default", layer) charm_state = CharmState( framework="flask", From a5bd4feff13fb3c48202416ec3820309c3e3f421 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 17 Jan 2025 08:32:39 +0300 Subject: [PATCH 46/70] chore(trivy): Add back Go stdlib ignore --- .trivyignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.trivyignore b/.trivyignore index b88a77b..8d0e47a 100644 --- a/.trivyignore +++ b/.trivyignore @@ -2,5 +2,7 @@ CVE-2024-6345 # pebble: Calling Decoder.Decode on a message which contains deeply nested structures can cause a panic due to stack exhaustion CVE-2024-34156 +# pebble: Go stdlib +CVE-2024-45338 # go-app: Go crypto lib CVE-2024-45337 From 4f05ded9007b23d6108fbd7423fbe2736b24d0b4 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 17 Jan 2025 09:20:28 +0300 Subject: [PATCH 47/70] chore(trivy): Add back --- .trivyignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.trivyignore b/.trivyignore index 8d0e47a..df6dc3a 100644 --- a/.trivyignore +++ b/.trivyignore @@ -1,3 +1,5 @@ +# ignore CVE introduced by python3-gunicorn +CVE-2022-40897 # pypa/setuptools: Remote code execution via download CVE-2024-6345 # pebble: Calling Decoder.Decode on a message which contains deeply nested structures can cause a panic due to stack exhaustion From 4fa1502043aa2f77d60699e39141af59a32859c6 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 20 Jan 2025 07:29:17 +0300 Subject: [PATCH 48/70] chore(test): Remove extra examples. Integrate tracing into default example --- .../django_app/django_app/django_app/urls.py | 3 +- .../django_app/django_app/testing/views.py | 10 + examples/django/django_app/requirements.txt | 6 + .../django_tracing_app/__init__.py | 2 - .../django_tracing_app/asgi.py | 19 - .../django_tracing_app/settings.py | 130 ------ .../django_tracing_app/urls.py | 33 -- .../django_tracing_app/wsgi.py | 19 - .../django_tracing_app/manage.py | 26 -- .../django_tracing_app/migrate.sh | 5 - .../django_tracing_app/testing/__init__.py | 2 - .../django_tracing_app/testing/admin.py | 6 - .../django_tracing_app/testing/apps.py | 9 - .../testing/migrations/__init__.py | 2 - .../django_tracing_app/testing/models.py | 6 - .../django_tracing_app/testing/tests.py | 6 - .../django_tracing_app/testing/views.py | 49 --- .../django_tracing_app/requirements.txt | 9 - .../django/django_tracing_app/rockcraft.yaml | 14 - examples/fastapi/fastapi_app/app.py | 15 +- examples/fastapi/fastapi_app/requirements.txt | 5 + .../fastapi/fastapi_tracing_app/alembic.ini | 5 - .../fastapi_tracing_app/alembic/env.py | 24 -- .../eca6177bd16a_initial_migration.py | 33 -- examples/fastapi/fastapi_tracing_app/app.py | 56 --- .../fastapi/fastapi_tracing_app/migrate.sh | 5 - .../fastapi_tracing_app/requirements.txt | 11 - .../fastapi_tracing_app/rockcraft.yaml | 25 -- examples/flask/test_rock/app.py | 35 ++ examples/flask/test_rock/requirements.txt | 4 - examples/flask/test_tracing_rock/app.py | 395 ------------------ .../flask/test_tracing_rock/requirements.txt | 18 - .../flask/test_tracing_rock/rockcraft.yaml | 27 -- examples/go/go_app/go.mod | 36 +- examples/go/go_app/go.sum | 50 +++ .../go/go_app/internal/service/service.go | 23 + examples/go/go_app/main.go | 53 ++- examples/go/go_tracing_app/go.mod | 46 -- examples/go/go_tracing_app/go.sum | 96 ----- .../internal/service/service.go | 61 --- examples/go/go_tracing_app/main.go | 181 
-------- examples/go/go_tracing_app/migrate.sh | 5 - examples/go/go_tracing_app/rockcraft.yaml | 60 --- tests/conftest.py | 4 - tests/integration/conftest.py | 144 +++++++ tests/integration/flask/conftest.py | 10 - tests/integration/integrations/conftest.py | 143 ------- .../integration/integrations/test_tracing.py | 10 +- 48 files changed, 373 insertions(+), 1563 deletions(-) delete mode 100644 examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py delete mode 100755 examples/django/django_tracing_app/django_tracing_app/manage.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/migrate.sh delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/__init__.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/admin.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/apps.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/models.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/tests.py delete mode 100644 examples/django/django_tracing_app/django_tracing_app/testing/views.py delete mode 100644 examples/django/django_tracing_app/requirements.txt delete mode 100644 examples/django/django_tracing_app/rockcraft.yaml delete mode 100644 examples/fastapi/fastapi_tracing_app/alembic.ini delete mode 100644 examples/fastapi/fastapi_tracing_app/alembic/env.py delete mode 100644 examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py delete mode 100644 examples/fastapi/fastapi_tracing_app/app.py delete mode 100644 examples/fastapi/fastapi_tracing_app/migrate.sh delete mode 100644 examples/fastapi/fastapi_tracing_app/requirements.txt delete mode 100644 examples/fastapi/fastapi_tracing_app/rockcraft.yaml delete mode 100644 examples/flask/test_tracing_rock/app.py delete mode 100644 examples/flask/test_tracing_rock/requirements.txt delete mode 100644 examples/flask/test_tracing_rock/rockcraft.yaml delete mode 100644 examples/go/go_tracing_app/go.mod delete mode 100644 examples/go/go_tracing_app/go.sum delete mode 100644 examples/go/go_tracing_app/internal/service/service.go delete mode 100644 examples/go/go_tracing_app/main.go delete mode 100755 examples/go/go_tracing_app/migrate.sh delete mode 100644 examples/go/go_tracing_app/rockcraft.yaml diff --git a/examples/django/django_app/django_app/django_app/urls.py b/examples/django/django_app/django_app/django_app/urls.py index f43a34e..287cf78 100644 --- a/examples/django/django_app/django_app/django_app/urls.py +++ b/examples/django/django_app/django_app/django_app/urls.py @@ -20,13 +20,14 @@ from django.contrib import admin from django.urls import path -from testing.views import environ, get_settings, login, sleep, user_count +from testing.views import environ, get_settings, hello_world, login, sleep, user_count urlpatterns = [ path("admin/", admin.site.urls), path("settings/", get_settings, name="get_settings"), 
path("len/users", user_count, name="user_count"), path("environ", environ, name="environ"), + path("", hello_world, name="hello_world"), path("sleep", sleep, name="sleep"), path("login", login, name="login"), ] diff --git a/examples/django/django_app/django_app/testing/views.py b/examples/django/django_app/django_app/testing/views.py index 9001f17..af8fe74 100644 --- a/examples/django/django_app/django_app/testing/views.py +++ b/examples/django/django_app/django_app/testing/views.py @@ -8,6 +8,9 @@ from django.contrib.auth import authenticate from django.contrib.auth.models import User from django.http import HttpResponse, JsonResponse +from opentelemetry import trace + +tracer = trace.get_tracer(__name__) def environ(request): @@ -25,6 +28,13 @@ def get_settings(request, name): return JsonResponse({"error": f"settings {name!r} not found"}, status=404) +def hello_world(request): + # Create a custom span + with tracer.start_as_current_span("custom-span"): + print("Hello, World!!!") + return HttpResponse("Hello, World!") + + def sleep(request): duration = request.GET.get("duration") time.sleep(int(duration)) diff --git a/examples/django/django_app/requirements.txt b/examples/django/django_app/requirements.txt index 2efd6d5..f026b65 100644 --- a/examples/django/django_app/requirements.txt +++ b/examples/django/django_app/requirements.txt @@ -1,3 +1,9 @@ Django tzdata psycopg2-binary +opentelemetry-api +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http +opentelemetry-instrumentation +opentelemetry-instrumentation-wsgi +opentelemetry-sdk diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py deleted file mode 100644 index dddb292..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py deleted file mode 100644 index b31b018..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/asgi.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -""" -ASGI config for django_tracing_app project. - -It exposes the ASGI callable as a module-level variable named ``application``. - -For more information on this file, see -https://docs.djangoproject.com/en/5.0/howto/deployment/asgi/ -""" - -import os - -from django.core.asgi import get_asgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings") - -application = get_asgi_application() diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py deleted file mode 100644 index 9a63aa2..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/settings.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -""" -Django settings for django_tracing_app project. - -Generated by 'django-admin startproject' using Django 5.0.2. 
- -For more information on this file, see -https://docs.djangoproject.com/en/5.0/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/5.0/ref/settings/ -""" - -import json -import os -import urllib.parse -from pathlib import Path - -# Build paths inside the project like this: BASE_DIR / 'subdir'. -BASE_DIR = Path(__file__).resolve().parent.parent - - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/5.0/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "secret") - -# SECURITY WARNING: don't run with debug turned on in production! -DEBUG = os.environ.get("DJANGO_DEBUG", "true") == "true" - -ALLOWED_HOSTS = json.loads(os.environ.get("DJANGO_ALLOWED_HOSTS", '["*"]')) - - -INSTALLED_APPS = [ - "django.contrib.admin", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", -] - -MIDDLEWARE = [ - "django.middleware.security.SecurityMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", -] - -ROOT_URLCONF = "django_tracing_app.urls" - -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ], - }, - }, -] - -WSGI_APPLICATION = "django_tracing_app.wsgi.application" - - -# Database -# https://docs.djangoproject.com/en/5.0/ref/settings/#databases -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("POSTGRESQL_DB_NAME"), - "USER": os.environ.get("POSTGRESQL_DB_USERNAME"), - "PASSWORD": os.environ.get("POSTGRESQL_DB_PASSWORD"), - "HOST": os.environ.get("POSTGRESQL_DB_HOSTNAME"), - "PORT": os.environ.get("POSTGRESQL_DB_PORT", "5432"), - } -} - - -# Password validation -# https://docs.djangoproject.com/en/5.0/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", - }, -] - - -# Internationalization -# https://docs.djangoproject.com/en/5.0/topics/i18n/ - -LANGUAGE_CODE = "en-us" - -TIME_ZONE = "UTC" - -USE_I18N = True - -USE_TZ = True - - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/5.0/howto/static-files/ - -STATIC_URL = "static/" - -# Default primary key field type -# https://docs.djangoproject.com/en/5.0/ref/settings/#default-auto-field - -DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py deleted file mode 100644 
index 965bd40..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/urls.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -""" -URL configuration for django_tracing_app project. - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/5.0/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" - -from django.contrib import admin -from django.urls import path -from testing.views import environ, get_settings, login, my_view, sleep, user_count - -urlpatterns = [ - path("admin/", admin.site.urls), - path("settings/", get_settings, name="get_settings"), - path("len/users", user_count, name="user_count"), - path("environ", environ, name="environ"), - path("sleep", sleep, name="sleep"), - path("", my_view, name="my_view"), - path("login", login, name="login"), -] diff --git a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py b/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py deleted file mode 100644 index e3bced4..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/django_tracing_app/wsgi.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -""" -WSGI config for django_tracing_app project. - -It exposes the WSGI callable as a module-level variable named ``application``. - -For more information on this file, see -https://docs.djangoproject.com/en/5.0/howto/deployment/wsgi/ -""" - -import os - -from django.core.wsgi import get_wsgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings") - -application = get_wsgi_application() diff --git a/examples/django/django_tracing_app/django_tracing_app/manage.py b/examples/django/django_tracing_app/django_tracing_app/manage.py deleted file mode 100755 index cf833a2..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/manage.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -"""Django's command-line utility for administrative tasks.""" -import os -import sys - - -def main(): - """Run administrative tasks.""" - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_tracing_app.settings") - try: - from django.core.management import execute_from_command_line - except ImportError as exc: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) from exc - execute_from_command_line(sys.argv) - - -if __name__ == "__main__": - main() diff --git a/examples/django/django_tracing_app/django_tracing_app/migrate.sh b/examples/django/django_tracing_app/django_tracing_app/migrate.sh deleted file mode 100644 index 6aaeec6..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/migrate.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/bash -# Copyright 2025 Canonical Ltd. 
-# See LICENSE file for licensing details. - -python3 manage.py migrate diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py b/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py deleted file mode 100644 index dddb292..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/admin.py b/examples/django/django_tracing_app/django_tracing_app/testing/admin.py deleted file mode 100644 index 56e199a..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/admin.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -from django.contrib import admin - -# Register your models here. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/apps.py b/examples/django/django_tracing_app/django_tracing_app/testing/apps.py deleted file mode 100644 index 6eb0613..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/apps.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -from django.apps import AppConfig - - -class TestingConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "testing" diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py b/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py deleted file mode 100644 index dddb292..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/migrations/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/models.py b/examples/django/django_tracing_app/django_tracing_app/testing/models.py deleted file mode 100644 index d709735..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/models.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -from django.db import models - -# Create your models here. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/tests.py b/examples/django/django_tracing_app/django_tracing_app/testing/tests.py deleted file mode 100644 index b62faeb..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/tests.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -from django.test import TestCase - -# Create your tests here. diff --git a/examples/django/django_tracing_app/django_tracing_app/testing/views.py b/examples/django/django_tracing_app/django_tracing_app/testing/views.py deleted file mode 100644 index ca83a1b..0000000 --- a/examples/django/django_tracing_app/django_tracing_app/testing/views.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. 
- -import os -import time - -from django.conf import settings -from django.contrib.auth import authenticate -from django.contrib.auth.models import User -from django.http import HttpResponse, JsonResponse -from opentelemetry import trace - -tracer = trace.get_tracer(__name__) - - -def environ(request): - return JsonResponse(dict(os.environ)) - - -def user_count(request): - return JsonResponse(User.objects.count(), safe=False) - - -def get_settings(request, name): - if hasattr(settings, name): - return JsonResponse(getattr(settings, name), safe=False) - else: - return JsonResponse({"error": f"settings {name!r} not found"}, status=404) - - -def my_view(request): - # Create a custom span - with tracer.start_as_current_span("custom-span"): - print("Hello, World!!!") - return HttpResponse("Hello, World!") - - -def sleep(request): - duration = request.GET.get("duration") - time.sleep(int(duration)) - return HttpResponse() - - -def login(request): - user = authenticate(username=request.GET.get("username"), password=request.GET.get("password")) - if user is not None: - return HttpResponse(status=200) - else: - return HttpResponse(status=403) diff --git a/examples/django/django_tracing_app/requirements.txt b/examples/django/django_tracing_app/requirements.txt deleted file mode 100644 index f026b65..0000000 --- a/examples/django/django_tracing_app/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -Django -tzdata -psycopg2-binary -opentelemetry-api -opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http -opentelemetry-instrumentation -opentelemetry-instrumentation-wsgi -opentelemetry-sdk diff --git a/examples/django/django_tracing_app/rockcraft.yaml b/examples/django/django_tracing_app/rockcraft.yaml deleted file mode 100644 index 3b165bd..0000000 --- a/examples/django/django_tracing_app/rockcraft.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -name: django-tracing-app -summary: Example Django application image. -description: Example Django application image. -version: "0.1" -base: ubuntu@22.04 -license: Apache-2.0 -platforms: - amd64: - -extensions: - - django-framework diff --git a/examples/fastapi/fastapi_app/app.py b/examples/fastapi/fastapi_app/app.py index 7ff5861..0bb3417 100644 --- a/examples/fastapi/fastapi_app/app.py +++ b/examples/fastapi/fastapi_app/app.py @@ -3,12 +3,24 @@ import os from fastapi import FastAPI, HTTPException +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from opentelemetry.trace import get_tracer_provider, set_tracer_provider from sqlalchemy import Column, Integer, String, create_engine, inspect from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker app = FastAPI() +set_tracer_provider(TracerProvider()) +get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) +FastAPIInstrumentor.instrument_app(app) +tracer = trace.get_tracer(__name__) + engine = create_engine(os.environ["POSTGRESQL_DB_CONNECT_STRING"], echo=True) Session = scoped_session(sessionmaker(bind=engine)) @@ -26,7 +38,8 @@ class User(Base): @app.get("/") async def root(): - return "Hello, World!" 
+ with tracer.start_as_current_span("custom-span"): + return "Hello, World!" @app.get("/env/user-defined-config") diff --git a/examples/fastapi/fastapi_app/requirements.txt b/examples/fastapi/fastapi_app/requirements.txt index a697414..dfb8412 100644 --- a/examples/fastapi/fastapi_app/requirements.txt +++ b/examples/fastapi/fastapi_app/requirements.txt @@ -2,3 +2,8 @@ fastapi SQLAlchemy alembic psycopg2-binary +opentelemetry-api +opentelemetry-sdk +opentelemetry-instrumentation-fastapi +opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http diff --git a/examples/fastapi/fastapi_tracing_app/alembic.ini b/examples/fastapi/fastapi_tracing_app/alembic.ini deleted file mode 100644 index c08a8b6..0000000 --- a/examples/fastapi/fastapi_tracing_app/alembic.ini +++ /dev/null @@ -1,5 +0,0 @@ -; Copyright 2025 Canonical Ltd. -; See LICENSE file for licensing details. - -[alembic] -script_location = alembic diff --git a/examples/fastapi/fastapi_tracing_app/alembic/env.py b/examples/fastapi/fastapi_tracing_app/alembic/env.py deleted file mode 100644 index 4d8d851..0000000 --- a/examples/fastapi/fastapi_tracing_app/alembic/env.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -import os -import sys - -from alembic import context - -sys.path.append(os.getcwd()) - -from app import Base, engine - -config = context.config -target_metadata = Base.metadata - - -def run_migrations(): - with engine.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) - with context.begin_transaction(): - context.run_migrations() - - -run_migrations() diff --git a/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py b/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py deleted file mode 100644 index 7838caf..0000000 --- a/examples/fastapi/fastapi_tracing_app/alembic/versions/eca6177bd16a_initial_migration.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -"""Initial migration - -Revision ID: eca6177bd16a -Revises: -Create Date: 2023-09-05 17:12:56.303534 - -""" -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. -revision: str = "eca6177bd16a" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade(): - op.create_table( - "users", - sa.Column("id", sa.Integer, primary_key=True), - sa.Column("username", sa.String(80), unique=True, nullable=False), - sa.Column("password", sa.String(256), nullable=False), - ) - - -def downgrade(): - op.drop_table("users") diff --git a/examples/fastapi/fastapi_tracing_app/app.py b/examples/fastapi/fastapi_tracing_app/app.py deleted file mode 100644 index e41fa18..0000000 --- a/examples/fastapi/fastapi_tracing_app/app.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. 
-import os - -from fastapi import FastAPI, HTTPException -from opentelemetry import trace -from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter -from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter -from opentelemetry.trace import get_tracer_provider, set_tracer_provider -from sqlalchemy import Column, Integer, String, create_engine, inspect -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import scoped_session, sessionmaker - -app = FastAPI() - -set_tracer_provider(TracerProvider()) -get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) - -get_tracer_provider().add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) -FastAPIInstrumentor.instrument_app(app) -tracer = trace.get_tracer(__name__) -engine = create_engine(os.environ["POSTGRESQL_DB_CONNECT_STRING"], echo=True) - -Session = scoped_session(sessionmaker(bind=engine)) - -Base = declarative_base() - - -class User(Base): - __tablename__ = "users" - - id = Column(Integer, primary_key=True) - username = Column(String(80), unique=True, nullable=False) - password = Column(String(256), nullable=False) - - -@app.get("/") -async def root(): - with tracer.start_as_current_span("custom-span"): - return "Hello, World!" - - -@app.get("/env/user-defined-config") -async def user_defined_config(): - return os.getenv("APP_USER_DEFINED_CONFIG", None) - - -@app.get("/table/{table}") -def test_table(table: str): - if inspect(engine).has_table(table): - return "SUCCESS" - else: - raise HTTPException(status_code=404, detail="Table not found") diff --git a/examples/fastapi/fastapi_tracing_app/migrate.sh b/examples/fastapi/fastapi_tracing_app/migrate.sh deleted file mode 100644 index 18ad323..0000000 --- a/examples/fastapi/fastapi_tracing_app/migrate.sh +++ /dev/null @@ -1,5 +0,0 @@ -#! /usr/bin/env bash -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -alembic upgrade head diff --git a/examples/fastapi/fastapi_tracing_app/requirements.txt b/examples/fastapi/fastapi_tracing_app/requirements.txt deleted file mode 100644 index c7f2b22..0000000 --- a/examples/fastapi/fastapi_tracing_app/requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ -fastapi -SQLAlchemy -alembic -psycopg2-binary -opentelemetry-api -opentelemetry-sdk -opentelemetry-distro -opentelemetry-instrumentation-fastapi -opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http -opentelemetry-instrumentation diff --git a/examples/fastapi/fastapi_tracing_app/rockcraft.yaml b/examples/fastapi/fastapi_tracing_app/rockcraft.yaml deleted file mode 100644 index 572a091..0000000 --- a/examples/fastapi/fastapi_tracing_app/rockcraft.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -name: fastapi-tracing-app -summary: Example FastAPI application image. -description: Example FastAPI application image. -version: "0.1" -base: ubuntu@24.04 - -platforms: - amd64: - -extensions: - - fastapi-framework - -parts: - alembic: - plugin: dump - source: . 
- organize: - alembic: app/alembic - alembic.ini: app/alembic.ini - stage: - - app/alembic - - app/alembic.ini diff --git a/examples/flask/test_rock/app.py b/examples/flask/test_rock/app.py index 9527532..3f8badb 100644 --- a/examples/flask/test_rock/app.py +++ b/examples/flask/test_rock/app.py @@ -20,6 +20,8 @@ import redis from celery import Celery, Task from flask import Flask, g, jsonify, request +from opentelemetry import trace +from opentelemetry.instrumentation.flask import FlaskInstrumentor def hostname(): @@ -60,6 +62,39 @@ def __call__(self, *args: object, **kwargs: object) -> object: celery_app = celery_init_app(app, broker_url) redis_client = redis.Redis.from_url(broker_url) if broker_url else None +FlaskInstrumentor().instrument_app(app) +tracer = trace.get_tracer(__name__) + + +def fib_slow(n): + if n <= 1: + return n + return fib_slow(n - 1) + fib_fast(n - 2) + + +def fib_fast(n): + nth_fib = [0] * (n + 2) + nth_fib[1] = 1 + for i in range(2, n + 1): + nth_fib[i] = nth_fib[i - 1] + nth_fib[i - 2] + return nth_fib[n] + + +@app.route("/fibonacci") +def fibonacci(): + n = int(request.args.get("n", 1)) + with tracer.start_as_current_span("root"): + with tracer.start_as_current_span("fib_slow") as slow_span: + answer = fib_slow(n) + slow_span.set_attribute("n", n) + slow_span.set_attribute("nth_fibonacci", answer) + with tracer.start_as_current_span("fib_fast") as fast_span: + answer = fib_fast(n) + fast_span.set_attribute("n", n) + fast_span.set_attribute("nth_fibonacci", answer) + + return f"F({n}) is: ({answer})" + @celery_app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): diff --git a/examples/flask/test_rock/requirements.txt b/examples/flask/test_rock/requirements.txt index 9966cad..5bd085b 100644 --- a/examples/flask/test_rock/requirements.txt +++ b/examples/flask/test_rock/requirements.txt @@ -9,10 +9,6 @@ boto3 pika celery opentelemetry-api -opentelemetry-exporter-otlp opentelemetry-exporter-otlp-proto-http -opentelemetry-instrumentation opentelemetry-instrumentation-flask -opentelemetry-instrumentation-wsgi opentelemetry-sdk -opentelemetry-distro diff --git a/examples/flask/test_tracing_rock/app.py b/examples/flask/test_tracing_rock/app.py deleted file mode 100644 index 9e8e117..0000000 --- a/examples/flask/test_tracing_rock/app.py +++ /dev/null @@ -1,395 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -import logging -import os -import socket -import time -import urllib.parse -from urllib.parse import urlparse - -import boto3 -import botocore.config -import pika -import psycopg -import pymongo -import pymongo.database -import pymongo.errors -import pymysql -import pymysql.cursors -import redis -from celery import Celery, Task -from flask import Flask, g, jsonify, request -from opentelemetry import trace -from opentelemetry.instrumentation.flask import FlaskInstrumentor - - -def hostname(): - """Get the hostname of the current machine.""" - return socket.gethostbyname(socket.gethostname()) - - -def celery_init_app(app: Flask, broker_url: str) -> Celery: - """Initialise celery using the redis connection string. - - See https://flask.palletsprojects.com/en/3.0.x/patterns/celery/#integrate-celery-with-flask. 
- """ - - class FlaskTask(Task): - def __call__(self, *args: object, **kwargs: object) -> object: - with app.app_context(): - return self.run(*args, **kwargs) - - celery_app = Celery(app.name, task_cls=FlaskTask) - celery_app.set_default() - app.extensions["celery"] = celery_app - app.config.from_mapping( - CELERY=dict( - broker_url=broker_url, - result_backend=broker_url, - task_ignore_result=True, - ), - ) - celery_app.config_from_object(app.config["CELERY"]) - return celery_app - - -app = Flask(__name__) -app.config.from_prefixed_env() - -FlaskInstrumentor().instrument_app(app) - -tracer = trace.get_tracer(__name__) - -broker_url = os.environ.get("REDIS_DB_CONNECT_STRING") -# Configure Celery only if Redis is configured -celery_app = celery_init_app(app, broker_url) -redis_client = redis.Redis.from_url(broker_url) if broker_url else None - - -def fib_slow(n): - if n <= 1: - return n - return fib_slow(n - 1) + fib_fast(n - 2) - - -def fib_fast(n): - nth_fib = [0] * (n + 2) - nth_fib[1] = 1 - for i in range(2, n + 1): - nth_fib[i] = nth_fib[i - 1] + nth_fib[i - 2] - return nth_fib[n] - - -@app.route("/fibonacci") -def fibonacci(): - n = int(request.args.get("n", 1)) - with tracer.start_as_current_span("root"): - with tracer.start_as_current_span("fib_slow") as slow_span: - answer = fib_slow(n) - slow_span.set_attribute("n", n) - slow_span.set_attribute("nth_fibonacci", answer) - with tracer.start_as_current_span("fib_fast") as fast_span: - answer = fib_fast(n) - fast_span.set_attribute("n", n) - fast_span.set_attribute("nth_fibonacci", answer) - - return f"F({n}) is: ({answer})" - - -@celery_app.on_after_configure.connect -def setup_periodic_tasks(sender, **kwargs): - """Set up periodic tasks in the scheduler.""" - try: - # This will only have an effect in the beat scheduler. - sender.add_periodic_task(0.5, scheduled_task.s(hostname()), name="every 0.5s") - except NameError as e: - logging.exception("Failed to configure the periodic task") - - -@celery_app.task -def scheduled_task(scheduler_hostname): - """Function to run a schedule task in a worker. - - The worker that will run this task will add the scheduler hostname argument - to the "schedulers" set in Redis, and the worker's hostname to the "workers" - set in Redis. - """ - worker_hostname = hostname() - logging.info( - "scheduler host received %s in worker host %s", scheduler_hostname, worker_hostname - ) - redis_client.sadd("schedulers", scheduler_hostname) - redis_client.sadd("workers", worker_hostname) - logging.info("schedulers: %s", redis_client.smembers("schedulers")) - logging.info("workers: %s", redis_client.smembers("workers")) - # The goal is to have all workers busy in all processes. - # For that it maybe necessary to exhaust all workers, but not to get the pending tasks - # too big, so all schedulers can manage to run their scheduled tasks. - # Celery prefetches tasks, and if they cannot be run they are put in reserved. - # If all processes have tasks in reserved, this task will finish immediately to not make - # queues any longer. 
- inspect_obj = celery_app.control.inspect() - reserved_sizes = [len(tasks) for tasks in inspect_obj.reserved().values()] - logging.info("number of reserved tasks %s", reserved_sizes) - delay = 0 if min(reserved_sizes) > 0 else 5 - time.sleep(delay) - - -def get_mysql_database(): - """Get the mysql db connection.""" - if "mysql_db" not in g: - if "MYSQL_DB_CONNECT_STRING" in os.environ: - uri_parts = urlparse(os.environ["MYSQL_DB_CONNECT_STRING"]) - g.mysql_db = pymysql.connect( - host=uri_parts.hostname, - user=uri_parts.username, - password=uri_parts.password, - database=uri_parts.path[1:], - port=uri_parts.port, - ) - else: - return None - return g.mysql_db - - -def get_postgresql_database(): - """Get the postgresql db connection.""" - if "postgresql_db" not in g: - if "POSTGRESQL_DB_CONNECT_STRING" in os.environ: - g.postgresql_db = psycopg.connect( - conninfo=os.environ["POSTGRESQL_DB_CONNECT_STRING"], - ) - else: - return None - return g.postgresql_db - - -def get_mongodb_database() -> pymongo.database.Database | None: - """Get the mongodb db connection.""" - if "mongodb_db" not in g: - if "MONGODB_DB_CONNECT_STRING" in os.environ: - uri = os.environ["MONGODB_DB_CONNECT_STRING"] - client = pymongo.MongoClient(uri) - db = urllib.parse.urlparse(uri).path.removeprefix("/") - g.mongodb_db = client.get_database(db) - else: - return None - return g.mongodb_db - - -def get_redis_database() -> redis.Redis | None: - if "redis_db" not in g: - if "REDIS_DB_CONNECT_STRING" in os.environ: - uri = os.environ["REDIS_DB_CONNECT_STRING"] - g.redis_db = redis.Redis.from_url(uri) - else: - return None - return g.redis_db - - -def get_rabbitmq_connection() -> pika.BlockingConnection | None: - """Get rabbitmq connection.""" - if "rabbitmq" not in g: - if "RABBITMQ_HOSTNAME" in os.environ: - username = os.environ["RABBITMQ_USERNAME"] - password = os.environ["RABBITMQ_PASSWORD"] - hostname = os.environ["RABBITMQ_HOSTNAME"] - vhost = os.environ["RABBITMQ_VHOST"] - port = os.environ["RABBITMQ_PORT"] - credentials = pika.PlainCredentials(username, password) - parameters = pika.ConnectionParameters(hostname, port, vhost, credentials) - g.rabbitmq = pika.BlockingConnection(parameters) - else: - return None - return g.rabbitmq - - -def get_rabbitmq_connection_from_uri() -> pika.BlockingConnection | None: - """Get rabbitmq connection from uri.""" - if "rabbitmq_from_uri" not in g: - if "RABBITMQ_CONNECT_STRING" in os.environ: - uri = os.environ["RABBITMQ_CONNECT_STRING"] - parameters = pika.URLParameters(uri) - g.rabbitmq_from_uri = pika.BlockingConnection(parameters) - else: - return None - return g.rabbitmq_from_uri - - -def get_boto3_client(): - if "boto3_client" not in g: - if "S3_ACCESS_KEY" in os.environ: - s3_client_config = botocore.config.Config( - s3={ - "addressing_style": os.environ["S3_ADDRESSING_STYLE"], - }, - # no_proxy env variable is not read by boto3, so - # this is needed for the tests to avoid hitting the proxy. 
- proxies={}, - ) - g.boto3_client = boto3.client( - "s3", - os.environ["S3_REGION"], - aws_access_key_id=os.environ["S3_ACCESS_KEY"], - aws_secret_access_key=os.environ["S3_SECRET_KEY"], - endpoint_url=os.environ["S3_ENDPOINT"], - use_ssl=False, - config=s3_client_config, - ) - else: - return None - return g.boto3_client - - -@app.teardown_appcontext -def teardown_database(_): - """Tear down databases connections.""" - mysql_db = g.pop("mysql_db", None) - if mysql_db is not None: - mysql_db.close() - postgresql_db = g.pop("postgresql_db", None) - if postgresql_db is not None: - postgresql_db.close() - mongodb_db = g.pop("mongodb_db", None) - if mongodb_db is not None: - mongodb_db.client.close() - boto3_client = g.pop("boto3_client", None) - if boto3_client is not None: - boto3_client.close() - rabbitmq = g.pop("rabbitmq", None) - if rabbitmq is not None: - rabbitmq.close() - rabbitmq_from_uri = g.pop("rabbitmq_from_uri", None) - if rabbitmq_from_uri is not None: - rabbitmq_from_uri.close() - - -@app.route("/") -def hello_world(): - return "Hello, World!" - - -@app.route("/sleep") -def sleep(): - duration_seconds = int(request.args.get("duration")) - time.sleep(duration_seconds) - return "" - - -@app.route("/config/") -def config(config_name: str): - return jsonify(app.config.get(config_name)) - - -@app.route("/mysql/status") -def mysql_status(): - """Mysql status endpoint.""" - if database := get_mysql_database(): - with database.cursor() as cursor: - sql = "SELECT version()" - cursor.execute(sql) - cursor.fetchone() - return "SUCCESS" - return "FAIL" - - -@app.route("/s3/status") -def s3_status(): - """S3 status endpoint.""" - if client := get_boto3_client(): - bucket_name = os.environ["S3_BUCKET"] - objectsresponse = client.list_objects(Bucket=bucket_name) - return "SUCCESS" - return "FAIL" - - -@app.route("/postgresql/status") -def postgresql_status(): - """Postgresql status endpoint.""" - if database := get_postgresql_database(): - with database.cursor() as cursor: - sql = "SELECT version()" - cursor.execute(sql) - cursor.fetchone() - return "SUCCESS" - return "FAIL" - - -@app.route("/mongodb/status") -def mongodb_status(): - """Mongodb status endpoint.""" - if (database := get_mongodb_database()) is not None: - database.list_collection_names() - return "SUCCESS" - return "FAIL" - - -@app.route("/redis/status") -def redis_status(): - """Redis status endpoint.""" - if database := get_redis_database(): - try: - database.set("foo", "bar") - return "SUCCESS" - except redis.exceptions.RedisError: - logging.exception("Error querying redis") - return "FAIL" - - -@app.route("/redis/clear_celery_stats") -def redis_celery_clear_stats(): - """Reset Redis statistics about workers and schedulers.""" - if database := get_redis_database(): - try: - database.delete("workers") - database.delete("schedulers") - return "SUCCESS" - except redis.exceptions.RedisError: - logging.exception("Error querying redis") - return "FAIL", 500 - - -@app.route("/redis/celery_stats") -def redis_celery_stats(): - """Read Redis statistics about workers and schedulers.""" - if database := get_redis_database(): - try: - worker_set = [str(host) for host in database.smembers("workers")] - beat_set = [str(host) for host in database.smembers("schedulers")] - return jsonify({"workers": worker_set, "schedulers": beat_set}) - except redis.exceptions.RedisError: - logging.exception("Error querying redis") - return "FAIL", 500 - - -@app.route("/rabbitmq/send") -def rabbitmq_send(): - """Send a message to "charm" queue.""" - if 
connection := get_rabbitmq_connection(): - channel = connection.channel() - channel.queue_declare(queue="charm") - channel.basic_publish(exchange="", routing_key="charm", body="SUCCESS") - return "SUCCESS" - return "FAIL" - - -@app.route("/rabbitmq/receive") -def rabbitmq_receive(): - """Receive a message from "charm" queue in blocking form.""" - if connection := get_rabbitmq_connection_from_uri(): - channel = connection.channel() - method_frame, _header_frame, body = channel.basic_get("charm") - if method_frame: - channel.basic_ack(method_frame.delivery_tag) - if body == b"SUCCESS": - return "SUCCESS" - return "FAIL. INCORRECT MESSAGE." - return "FAIL. NO MESSAGE." - return "FAIL. NO CONNECTION." - - -@app.route("/env") -def get_env(): - """Return environment variables""" - return jsonify(dict(os.environ)) diff --git a/examples/flask/test_tracing_rock/requirements.txt b/examples/flask/test_tracing_rock/requirements.txt deleted file mode 100644 index 9966cad..0000000 --- a/examples/flask/test_tracing_rock/requirements.txt +++ /dev/null @@ -1,18 +0,0 @@ -Flask -PyMySQL -PyMySQL[rsa] -PyMySQL[ed25519] -psycopg[binary] -pymongo -redis[hiredis] -boto3 -pika -celery -opentelemetry-api -opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http -opentelemetry-instrumentation -opentelemetry-instrumentation-flask -opentelemetry-instrumentation-wsgi -opentelemetry-sdk -opentelemetry-distro diff --git a/examples/flask/test_tracing_rock/rockcraft.yaml b/examples/flask/test_tracing_rock/rockcraft.yaml deleted file mode 100644 index 6a7f269..0000000 --- a/examples/flask/test_tracing_rock/rockcraft.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. -name: test-tracing-flask -summary: A flask test app -description: OCI image for the test flask app -version: "0.1" -base: ubuntu@22.04 -license: Apache-2.0 -platforms: - amd64: - -extensions: - - flask-framework - -services: - celery-worker: - override: replace - command: celery -A app:celery_app worker -c 2 --loglevel DEBUG - startup: enabled - user: _daemon_ - working-dir: /flask/app - celery-beat-scheduler: - override: replace - command: celery -A app:celery_app beat --loglevel DEBUG -s /tmp/celerybeat-schedule - startup: enabled - user: _daemon_ - working-dir: /flask/app diff --git a/examples/go/go_app/go.mod b/examples/go/go_app/go.mod index 6bd1996..5b7da97 100644 --- a/examples/go/go_app/go.mod +++ b/examples/go/go_app/go.mod @@ -1,8 +1,10 @@ // Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. 
-module go-app +module go-tracing-app -go 1.22.5 +go 1.22.7 + +toolchain go1.22.10 require ( github.com/jackc/pgx/v5 v5.6.0 @@ -11,16 +13,34 @@ require ( require ( github.com/beorn7/perks v1.0.1 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect github.com/jackc/puddle/v2 v2.2.1 // indirect github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.48.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect - golang.org/x/crypto v0.17.0 // indirect - golang.org/x/sync v0.3.0 // indirect - golang.org/x/sys v0.17.0 // indirect - golang.org/x/text v0.14.0 // indirect - google.golang.org/protobuf v1.33.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel v1.33.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0 // indirect + go.opentelemetry.io/otel/metric v1.33.0 // indirect + go.opentelemetry.io/otel/sdk v1.33.0 // indirect + go.opentelemetry.io/otel/trace v1.33.0 // indirect + go.opentelemetry.io/proto/otlp v1.4.0 // indirect + golang.org/x/crypto v0.30.0 // indirect + golang.org/x/net v0.32.0 // indirect + golang.org/x/sync v0.10.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 // indirect + google.golang.org/grpc v1.68.1 // indirect + google.golang.org/protobuf v1.35.2 // indirect ) diff --git a/examples/go/go_app/go.sum b/examples/go/go_app/go.sum index 8abab05..d670637 100644 --- a/examples/go/go_app/go.sum +++ b/examples/go/go_app/go.sum @@ -1,12 +1,25 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 
h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 h1:TmHmbvxPmaegwhDubVz0lICL0J5Ka2vwTzhoePEXsGE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0/go.mod h1:qztMSjm835F2bXf+5HKAPIS5qsmQDqZna/PgVt4rWtI= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= @@ -30,16 +43,53 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= +go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0 h1:W5AWUn/IVe8RFb5pZx1Uh9Laf/4+Qmm4kJL5zPuvR+0= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0/go.mod h1:mzKxJywMNBdEX8TSJais3NnsVZUaJ+bAy6UxPTng2vk= +go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= +go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= +go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM= +go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM= +go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= +go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= +go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= +go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY= +golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/net v0.32.0 
h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI= +golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 h1:CkkIfIt50+lT6NHAVoRYEyAvQGFM7xEwXUUywFvEb3Q= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576/go.mod h1:1R3kvZ1dtP3+4p4d3G8uJ8rFk/fWlScl38vanWACI08= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 h1:8ZmaLZE4XWrtU3MyClkYqqtl6Oegr3235h7jxsDyqCY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576/go.mod h1:5uTbfoYQed2U9p3KIj2/Zzm02PYhndfdmML0qC3q3FU= +google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0= +google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw= google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/examples/go/go_app/internal/service/service.go b/examples/go/go_app/internal/service/service.go index ed53d24..d7d9115 100644 --- a/examples/go/go_app/internal/service/service.go +++ b/examples/go/go_app/internal/service/service.go @@ -6,8 +6,31 @@ package service import ( "database/sql" "log" + + "context" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/trace" ) +var lemonsKey = attribute.Key("ex.com/lemons") + +// SubOperation is an example to demonstrate the use of named tracer. +// It creates a named tracer with its package path. +func SubOperation(ctx context.Context) error { + // Using global provider. Alternative is to have application provide a getter + // for its component to get the instance of the provider. 
+ tr := otel.Tracer("example.com/go-tracing-app") + + var span trace.Span + _, span = tr.Start(ctx, "Sub operation...") + defer span.End() + span.SetAttributes(lemonsKey.String("five")) + span.AddEvent("Sub span event") + + return nil +} type Service struct { PostgresqlURL string } diff --git a/examples/go/go_app/main.go b/examples/go/go_app/main.go index ce92162..efa2601 100644 --- a/examples/go/go_app/main.go +++ b/examples/go/go_app/main.go @@ -7,7 +7,7 @@ import ( "context" "errors" "fmt" - "go-app/internal/service" + "go-tracing-app/internal/service" "io" "log" "os" @@ -21,6 +21,12 @@ import ( _ "github.com/jackc/pgx/v5/stdlib" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" + + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + "go.opentelemetry.io/otel/trace" ) type mainHandler struct { @@ -58,8 +64,51 @@ func (h mainHandler) servePostgresql(w http.ResponseWriter, r *http.Request) { } } + +// var ( +// fooKey = attribute.Key("ex.com/foo") +// barKey = attribute.Key("ex.com/bar") +// anotherKey = attribute.Key("ex.com/another") +// ) + +var tp *sdktrace.TracerProvider + +// initTracer creates and registers trace provider instance. +func initTracer(ctx context.Context) error { + exp, err := otlptracehttp.New(ctx) + if err != nil { + return fmt.Errorf("failed to initialize stdouttrace exporter: %w", err) + } + bsp := sdktrace.NewBatchSpanProcessor(exp) + tp = sdktrace.NewTracerProvider( + sdktrace.WithSampler(sdktrace.AlwaysSample()), + sdktrace.WithSpanProcessor(bsp), + ) + otel.SetTracerProvider(tp) + return nil +} + func main() { - metricsPort, found := os.LookupEnv("APP_METRICS_PORT") + ctx := context.Background() + // initialize trace provider. + if err := initTracer(ctx); err != nil { + log.Panic(err) + } + + // Create a named tracer with package path as its name. + tracer := tp.Tracer("example.com/go-tracing-app") + defer func() { _ = tp.Shutdown(ctx) }() + + + var span trace.Span + ctx, span = tracer.Start(ctx, "operation") + defer span.End() + span.AddEvent("Nice operation!", trace.WithAttributes(attribute.Int("bogons", 100))) + // span.SetAttributes(anotherKey.String("yes")) + if err := service.SubOperation(ctx); err != nil { + panic(err) + } + metricsPort, found := os.LookupEnv("APP_METRICS_PORT") if !found { metricsPort = "8080" } diff --git a/examples/go/go_tracing_app/go.mod b/examples/go/go_tracing_app/go.mod deleted file mode 100644 index 5b7da97..0000000 --- a/examples/go/go_tracing_app/go.mod +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2025 Canonical Ltd. -// See LICENSE file for licensing details. 
-module go-tracing-app - -go 1.22.7 - -toolchain go1.22.10 - -require ( - github.com/jackc/pgx/v5 v5.6.0 - github.com/prometheus/client_golang v1.19.1 -) - -require ( - github.com/beorn7/perks v1.0.1 // indirect - github.com/cenkalti/backoff/v4 v4.3.0 // indirect - github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/go-logr/logr v1.4.2 // indirect - github.com/go-logr/stdr v1.2.2 // indirect - github.com/google/uuid v1.6.0 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 // indirect - github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect - github.com/jackc/puddle/v2 v2.2.1 // indirect - github.com/prometheus/client_model v0.5.0 // indirect - github.com/prometheus/common v0.48.0 // indirect - github.com/prometheus/procfs v0.12.0 // indirect - go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/otel v1.33.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 // indirect - go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0 // indirect - go.opentelemetry.io/otel/metric v1.33.0 // indirect - go.opentelemetry.io/otel/sdk v1.33.0 // indirect - go.opentelemetry.io/otel/trace v1.33.0 // indirect - go.opentelemetry.io/proto/otlp v1.4.0 // indirect - golang.org/x/crypto v0.30.0 // indirect - golang.org/x/net v0.32.0 // indirect - golang.org/x/sync v0.10.0 // indirect - golang.org/x/sys v0.28.0 // indirect - golang.org/x/text v0.21.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 // indirect - google.golang.org/grpc v1.68.1 // indirect - google.golang.org/protobuf v1.35.2 // indirect -) diff --git a/examples/go/go_tracing_app/go.sum b/examples/go/go_tracing_app/go.sum deleted file mode 100644 index d670637..0000000 --- a/examples/go/go_tracing_app/go.sum +++ /dev/null @@ -1,96 +0,0 @@ -github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= -github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= -github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= 
-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 h1:TmHmbvxPmaegwhDubVz0lICL0J5Ka2vwTzhoePEXsGE= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0/go.mod h1:qztMSjm835F2bXf+5HKAPIS5qsmQDqZna/PgVt4rWtI= -github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= -github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= -github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= -github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= -github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= -github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= -github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= -github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= -github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= -github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= -github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSzKKE= -github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= -github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= -github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= -go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0 
h1:W5AWUn/IVe8RFb5pZx1Uh9Laf/4+Qmm4kJL5zPuvR+0= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.33.0/go.mod h1:mzKxJywMNBdEX8TSJais3NnsVZUaJ+bAy6UxPTng2vk= -go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= -go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= -go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM= -go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM= -go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= -go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= -go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= -go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= -golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY= -golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI= -golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= -golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= -golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= -golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= -golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= -golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 h1:CkkIfIt50+lT6NHAVoRYEyAvQGFM7xEwXUUywFvEb3Q= -google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576/go.mod h1:1R3kvZ1dtP3+4p4d3G8uJ8rFk/fWlScl38vanWACI08= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 h1:8ZmaLZE4XWrtU3MyClkYqqtl6Oegr3235h7jxsDyqCY= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576/go.mod h1:5uTbfoYQed2U9p3KIj2/Zzm02PYhndfdmML0qC3q3FU= -google.golang.org/grpc v1.68.1 h1:oI5oTa11+ng8r8XMMN7jAOmWfPZWbYpCFaMUTACxkM0= -google.golang.org/grpc v1.68.1/go.mod h1:+q1XYFJjShcqn0QZHvCyeR4CXPA+llXIeUIfIe00waw= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= -google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/examples/go/go_tracing_app/internal/service/service.go b/examples/go/go_tracing_app/internal/service/service.go deleted file mode 100644 index d7d9115..0000000 --- a/examples/go/go_tracing_app/internal/service/service.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2025 Canonical Ltd. -// See LICENSE file for licensing details. - -package service - -import ( - "database/sql" - "log" - - "context" - - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/trace" -) - -var lemonsKey = attribute.Key("ex.com/lemons") - -// SubOperation is an example to demonstrate the use of named tracer. -// It creates a named tracer with its package path. -func SubOperation(ctx context.Context) error { - // Using global provider. Alternative is to have application provide a getter - // for its component to get the instance of the provider. - tr := otel.Tracer("example.com/go-tracing-app") - - var span trace.Span - _, span = tr.Start(ctx, "Sub operation...") - defer span.End() - span.SetAttributes(lemonsKey.String("five")) - span.AddEvent("Sub span event") - - return nil -} -type Service struct { - PostgresqlURL string -} - -func (s *Service) CheckPostgresqlMigrateStatus() (err error) { - db, err := sql.Open("pgx", s.PostgresqlURL) - if err != nil { - return - } - defer db.Close() - - var version string - err = db.QueryRow("SELECT version()").Scan(&version) - if err != nil { - return - } - log.Printf("postgresql version %s.", version) - - var numUsers int - // This will fail if the table does not exist. - err = db.QueryRow("SELECT count(*) from USERS").Scan(&numUsers) - if err != nil { - return - } - log.Printf("Number of users in Postgresql %d.", numUsers) - - return -} diff --git a/examples/go/go_tracing_app/main.go b/examples/go/go_tracing_app/main.go deleted file mode 100644 index c4ab87f..0000000 --- a/examples/go/go_tracing_app/main.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2025 Canonical Ltd. -// See LICENSE file for licensing details. 
- -package main - -import ( - "context" - "errors" - "fmt" - "go-tracing-app/internal/service" - "io" - "log" - "os" - "os/signal" - "syscall" - "time" - - "encoding/json" - "net/http" - - _ "github.com/jackc/pgx/v5/stdlib" - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promhttp" - - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" - sdktrace "go.opentelemetry.io/otel/sdk/trace" - "go.opentelemetry.io/otel/trace" -) - -type mainHandler struct { - counter prometheus.Counter - service service.Service -} - -func (h mainHandler) serveHelloWorld(w http.ResponseWriter, r *http.Request) { - h.counter.Inc() - log.Printf("Counter %#v\n", h.counter) - fmt.Fprintf(w, "Hello, World!") -} - -func (h mainHandler) serveUserDefinedConfig(w http.ResponseWriter, r *http.Request) { - h.counter.Inc() - - w.Header().Set("Content-Type", "application/json") - - user_defined_config, found := os.LookupEnv("APP_USER_DEFINED_CONFIG") - if !found { - json.NewEncoder(w).Encode(nil) - return - } - json.NewEncoder(w).Encode(user_defined_config) -} - -func (h mainHandler) servePostgresql(w http.ResponseWriter, r *http.Request) { - err := h.service.CheckPostgresqlMigrateStatus() - if err != nil { - log.Printf(err.Error()) - io.WriteString(w, "FAILURE") - return - } else { - io.WriteString(w, "SUCCESS") - } -} - - -var ( - fooKey = attribute.Key("ex.com/foo") - barKey = attribute.Key("ex.com/bar") - anotherKey = attribute.Key("ex.com/another") -) - -var tp *sdktrace.TracerProvider - -// initTracer creates and registers trace provider instance. -func initTracer(ctx context.Context) error { - exp, err := otlptracehttp.New(ctx) - if err != nil { - return fmt.Errorf("failed to initialize stdouttrace exporter: %w", err) - } - bsp := sdktrace.NewBatchSpanProcessor(exp) - tp = sdktrace.NewTracerProvider( - sdktrace.WithSampler(sdktrace.AlwaysSample()), - sdktrace.WithSpanProcessor(bsp), - ) - otel.SetTracerProvider(tp) - return nil -} - -func main() { - ctx := context.Background() - // initialize trace provider. - if err := initTracer(ctx); err != nil { - log.Panic(err) - } - - // Create a named tracer with package path as its name. 
- tracer := tp.Tracer("example.com/go-tracing-app") - defer func() { _ = tp.Shutdown(ctx) }() - - - var span trace.Span - ctx, span = tracer.Start(ctx, "operation") - defer span.End() - span.AddEvent("Nice operation!", trace.WithAttributes(attribute.Int("bogons", 100))) - span.SetAttributes(anotherKey.String("yes")) - if err := service.SubOperation(ctx); err != nil { - panic(err) - } - metricsPort, found := os.LookupEnv("APP_METRICS_PORT") - if !found { - metricsPort = "8080" - } - metricsPath, found := os.LookupEnv("APP_METRICS_PATH") - if !found { - metricsPath = "/metrics" - } - port, found := os.LookupEnv("APP_PORT") - if !found { - port = "8080" - } - - requestCounter := prometheus.NewCounter( - prometheus.CounterOpts{ - Name: "request_count", - Help: "No of request handled", - }) - postgresqlURL := os.Getenv("POSTGRESQL_DB_CONNECT_STRING") - - mux := http.NewServeMux() - mainHandler := mainHandler{ - counter: requestCounter, - service: service.Service{PostgresqlURL: postgresqlURL}, - } - mux.HandleFunc("/", mainHandler.serveHelloWorld) - mux.HandleFunc("/env/user-defined-config", mainHandler.serveUserDefinedConfig) - mux.HandleFunc("/postgresql/migratestatus", mainHandler.servePostgresql) - - if metricsPort != port { - prometheus.MustRegister(requestCounter) - - prometheusMux := http.NewServeMux() - prometheusMux.Handle(metricsPath, promhttp.Handler()) - prometheusServer := &http.Server{ - Addr: ":" + metricsPort, - Handler: prometheusMux, - } - go func() { - if err := prometheusServer.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { - log.Fatalf("Prometheus HTTP server error: %v", err) - } - log.Println("Prometheus HTTP Stopped serving new connections.") - }() - } else { - mux.Handle("/metrics", promhttp.Handler()) - } - - server := &http.Server{ - Addr: ":" + port, - Handler: mux, - } - go func() { - if err := server.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { - log.Fatalf("HTTP server error: %v", err) - } - log.Println("Stopped serving new connections.") - }() - - sigChan := make(chan os.Signal, 1) - signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM) - <-sigChan - - shutdownCtx, shutdownRelease := context.WithTimeout(context.Background(), 10*time.Second) - defer shutdownRelease() - - if err := server.Shutdown(shutdownCtx); err != nil { - log.Fatalf("HTTP shutdown error: %v", err) - } - log.Println("Graceful shutdown complete.") -} diff --git a/examples/go/go_tracing_app/migrate.sh b/examples/go/go_tracing_app/migrate.sh deleted file mode 100755 index 80b4034..0000000 --- a/examples/go/go_tracing_app/migrate.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -PGPASSWORD="${POSTGRESQL_DB_PASSWORD}" psql -h "${POSTGRESQL_DB_HOSTNAME}" -U "${POSTGRESQL_DB_USERNAME}" "${POSTGRESQL_DB_NAME}" -c "CREATE TABLE IF NOT EXISTS USERS(NAME CHAR(50));" diff --git a/examples/go/go_tracing_app/rockcraft.yaml b/examples/go/go_tracing_app/rockcraft.yaml deleted file mode 100644 index 5488285..0000000 --- a/examples/go/go_tracing_app/rockcraft.yaml +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. -name: go-tracing-app -# see https://documentation.ubuntu.com/rockcraft/en/stable/explanation/bases/ -# for more information about bases and using 'bare' bases for chiselled rocks -base: ubuntu@24.04 # the base environment for this Go application -version: '0.1' # just for humans. 
Semantic versioning is recommended -summary: A summary of your Go application # 79 char long summary -description: | - This is go-tracing-app's description. You have a paragraph or two to tell the - most important story about it. Keep it under 100 words though, - we live in tweetspace and your description wants to look good in the - container registries out there. -platforms: # The platforms this rock should be built on and run on - amd64: - -# to ensure the go-framework extension functions properly, your Go project -# should have a go.mod file. Check the parts section for the selection of -# the default binary. -# see https://documentation.ubuntu.com/rockcraft/en/stable/reference/extensions/go-framework -# for more information. -# +-- go_tracing_app -# | |-- go.mod -# | |-- migrate.sh - -extensions: - - go-framework - -# Uncomment the sections you need and adjust according to your requirements. -parts: -# go-framework/install-app: -# # Select a specific Go version. Otherwise the current stable one will be used. -# build-snaps: -# - go/1.22/stable -# organize: -# # If the main package is in the base directory and the rockcraft name -# # attribute is equal to the go module name, the name of the server will -# # be selected correctly, otherwise you can adjust it. -# # The file in /usr/local/bin/ with the name of the rockcraft will be -# # the binary to run your server. -# # You can also include here other binary files to be included in the rock. -# bin/otherbinary: usr/local/bin/projectname - -# go-framework/assets: -# stage: -# # By default, only the files in templates/ and static/ -# # are copied into the image. You can modify the list below to override -# # the default list and include or exclude specific files/directories -# # in your project. -# # Note: Prefix each entry with "go/" followed by the local path. 
-# - go/templates -# - go/static -# - go/otherdirectory -# - go/otherfile - - runtime-debs: - plugin: nil - stage-packages: - # Added manually for the migrations - - postgresql-client diff --git a/tests/conftest.py b/tests/conftest.py index f6f32b0..615192d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,13 +9,9 @@ def pytest_addoption(parser): parser.addoption("--charm-file", action="extend", nargs="+", default=[]) parser.addoption("--test-flask-image", action="store") parser.addoption("--test-async-flask-image", action="store") - parser.addoption("--test-tracing-flask-image", action="store") parser.addoption("--test-db-flask-image", action="store") parser.addoption("--django-app-image", action="store") parser.addoption("--django-async-app-image", action="store") - parser.addoption("--django-tracing-app-image", action="store") parser.addoption("--fastapi-app-image", action="store") - parser.addoption("--fastapi-tracing-app-image", action="store") parser.addoption("--go-app-image", action="store") - parser.addoption("--go-tracing-app-image", action="store") parser.addoption("--localstack-address", action="store") diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 5e885f7..4bc8eb4 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -3,6 +3,7 @@ import json import logging +import pathlib import pytest import pytest_asyncio @@ -10,11 +11,154 @@ from juju.client.jujudata import FileJujuData from juju.juju import Juju from juju.model import Controller, Model +from pytest import Config, FixtureRequest from pytest_operator.plugin import OpsTest +PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent logger = logging.getLogger(__name__) +@pytest.fixture(scope="module", name="test_flask_image") +def fixture_test_flask_image(pytestconfig: Config): + """Return the --test-flask-image test parameter.""" + test_flask_image = pytestconfig.getoption("--test-flask-image") + if not test_flask_image: + raise ValueError("the following arguments are required: --test-flask-image") + return test_flask_image + + +@pytest.fixture(scope="module", name="django_app_image") +def fixture_django_app_image(pytestconfig: Config): + """Return the --django-app-image test parameter.""" + image = pytestconfig.getoption("--django-app-image") + if not image: + raise ValueError("the following arguments are required: --django-app-image") + return image + + +@pytest.fixture(scope="module", name="fastapi_app_image") +def fixture_fastapi_app_image(pytestconfig: Config): + """Return the --fastapi-app-image test parameter.""" + image = pytestconfig.getoption("--fastapi-app-image") + if not image: + raise ValueError("the following arguments are required: --fastapi-app-image") + return image + + +@pytest.fixture(scope="module", name="go_app_image") +def fixture_go_app_image(pytestconfig: Config): + """Return the --go-app-image test parameter.""" + image = pytestconfig.getoption("--go-app-image") + if not image: + raise ValueError("the following arguments are required: --go-app-image") + return image + + +async def build_charm_file( + pytestconfig: pytest.Config, ops_test: OpsTest, tmp_path_factory, framework +) -> str: + """Get the existing charm file if exists, build a new one if not.""" + charm_file = next( + (f for f in pytestconfig.getoption("--charm-file") if f"/{framework}-k8s" in f), None + ) + + if not charm_file: + charm_location = PROJECT_ROOT / f"examples/{framework}/charm" + if framework == "flask": + charm_location = PROJECT_ROOT / 
f"examples/{framework}" + charm_file = await ops_test.build_charm(charm_location) + elif charm_file[0] != "/": + charm_file = PROJECT_ROOT / charm_file + inject_venv(charm_file, PROJECT_ROOT / "src" / "paas_charm") + return pathlib.Path(charm_file).absolute() + + +@pytest_asyncio.fixture(scope="module", name="flask_app") +async def flask_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + test_flask_image: str, +): + """Build and deploy the flask charm with test-flask image.""" + app_name = "flask-k8s" + + resources = { + "flask-app-image": test_flask_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "flask") + app = await model.deploy( + charm_file, resources=resources, application_name=app_name, series="jammy" + ) + await model.wait_for_idle(raise_on_blocked=True) + return app + + +@pytest_asyncio.fixture(scope="module", name="django_app") +async def django_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + django_app_image: str, +): + """Build and deploy the Django charm with django-app image.""" + app_name = "django-k8s" + + resources = { + "django-app-image": django_app_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "django") + + app = await model.deploy( + charm_file, + resources=resources, + config={"django-allowed-hosts": "*"}, + application_name=app_name, + series="jammy", + ) + return app + + +@pytest_asyncio.fixture(scope="module", name="fastapi_app") +async def fastapi_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + fastapi_app_image: str, +): + """Build and deploy the FastAPI charm with fastapi-app image.""" + app_name = "fastapi-k8s" + + resources = { + "app-image": fastapi_app_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "fastapi") + app = await model.deploy(charm_file, resources=resources, application_name=app_name) + return app + + +@pytest_asyncio.fixture(scope="module", name="go_app") +async def go_app_fixture( + pytestconfig: pytest.Config, + ops_test: OpsTest, + tmp_path_factory, + model: Model, + go_app_image: str, +): + """Build and deploy the Go charm with go-app image.""" + app_name = "go-k8s" + + resources = { + "app-image": go_app_image, + } + charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "go") + app = await model.deploy(charm_file, resources=resources, application_name=app_name) + return app + + @pytest_asyncio.fixture(scope="module", name="ops_test_lxd") async def ops_test_lxd_fixture(request, tmp_path_factory, ops_test: OpsTest): """Return a ops_test fixture for lxd, creating the lxd controller if it does not exist.""" diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index 9d56d29..8cc7959 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -35,15 +35,6 @@ def fixture_test_async_flask_image(pytestconfig: Config): return test_flask_image -@pytest.fixture(scope="module", name="test_flask_image") -def fixture_test_flask_image(pytestconfig: Config): - """Return the --test-flask-image test parameter.""" - test_flask_image = pytestconfig.getoption("--test-flask-image") - if not test_flask_image: - raise ValueError("the following arguments are required: --test-flask-image") - return test_flask_image - - @pytest.fixture(scope="module", name="test_db_flask_image") def 
fixture_test_db_flask_image(pytestconfig: Config): """Return the --test-flask-image test parameter.""" @@ -84,7 +75,6 @@ async def build_charm_fixture(charm_file: str, tmp_path_factory) -> str: "foo-bool": {"type": "boolean"}, "foo-dict": {"type": "string"}, "application-root": {"type": "string"}, - "webserver-worker-class": {"type": "string"}, }, tmp_path_factory.mktemp("flask"), ) diff --git a/tests/integration/integrations/conftest.py b/tests/integration/integrations/conftest.py index ea3242a..0d5b527 100644 --- a/tests/integration/integrations/conftest.py +++ b/tests/integration/integrations/conftest.py @@ -5,12 +5,10 @@ import os import pathlib -from secrets import token_hex import boto3 import pytest import pytest_asyncio -from botocore.config import Config as BotoConfig from juju.application import Application from juju.model import Model from minio import Minio @@ -52,147 +50,6 @@ def cwd(): return os.chdir(PROJECT_ROOT / "examples/flask") -@pytest.fixture(scope="module", name="test_tracing_flask_image") -def fixture_test_tracing_flask_image(pytestconfig: Config): - """Return the --test-flask-tracing-image test parameter.""" - test_flask_image = pytestconfig.getoption("--test-tracing-flask-image") - if not test_flask_image: - raise ValueError("the following arguments are required: --test-tracing-flask-image") - return test_flask_image - - -@pytest.fixture(scope="module", name="django_tracing_app_image") -def fixture_django_tracing_app_image(pytestconfig: Config): - """Return the --django-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--django-tracing-app-image") - if not image: - raise ValueError("the following arguments are required: --django-tracing-app-image") - return image - - -@pytest.fixture(scope="module", name="fastapi_tracing_app_image") -def fixture_fastapi_tracing_app_image(pytestconfig: Config): - """Return the --fastapi-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--fastapi-tracing-app-image") - if not image: - raise ValueError("the following arguments are required: --fastapi-tracing-app-image") - return image - - -@pytest.fixture(scope="module", name="go_tracing_app_image") -def fixture_go_tracing_app_image(pytestconfig: Config): - """Return the --go-tracing-app-image test parameter.""" - image = pytestconfig.getoption("--go-tracing-app-image") - if not image: - raise ValueError("the following arguments are required: --go-tracing-app-image") - return image - - -async def build_charm_file( - pytestconfig: pytest.Config, ops_test: OpsTest, tmp_path_factory, framework -) -> str: - """Get the existing charm file if exists, build a new one if not.""" - charm_file = next( - (f for f in pytestconfig.getoption("--charm-file") if f"/{framework}-k8s" in f), None - ) - - if not charm_file: - charm_location = PROJECT_ROOT / f"examples/{framework}/charm" - if framework == "flask": - charm_location = PROJECT_ROOT / f"examples/{framework}" - charm_file = await ops_test.build_charm(charm_location) - elif charm_file[0] != "/": - charm_file = PROJECT_ROOT / charm_file - inject_venv(charm_file, PROJECT_ROOT / "src" / "paas_charm") - return pathlib.Path(charm_file).absolute() - - -@pytest_asyncio.fixture(scope="module", name="flask_tracing_app") -async def flask_tracing_app_fixture( - pytestconfig: pytest.Config, - ops_test: OpsTest, - tmp_path_factory, - model: Model, - test_tracing_flask_image: str, -): - """Build and deploy the flask charm with test-tracing-flask image.""" - app_name = "flask-tracing-k8s" - - resources = { - 
"flask-app-image": test_tracing_flask_image, - } - charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "flask") - app = await model.deploy( - charm_file, resources=resources, application_name=app_name, series="jammy" - ) - await model.wait_for_idle(raise_on_blocked=True) - return app - - -@pytest_asyncio.fixture(scope="module", name="django_tracing_app") -async def django_tracing_app_fixture( - pytestconfig: pytest.Config, - ops_test: OpsTest, - tmp_path_factory, - model: Model, - django_tracing_app_image: str, -): - """Build and deploy the Django charm with django-tracing-app image.""" - app_name = "django-tracing-k8s" - - resources = { - "django-app-image": django_tracing_app_image, - } - charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "django") - - app = await model.deploy( - charm_file, - resources=resources, - config={"django-allowed-hosts": "*"}, - application_name=app_name, - series="jammy", - ) - return app - - -@pytest_asyncio.fixture(scope="module", name="fastapi_tracing_app") -async def fastapi_tracing_app_fixture( - pytestconfig: pytest.Config, - ops_test: OpsTest, - tmp_path_factory, - model: Model, - fastapi_tracing_app_image: str, -): - """Build and deploy the FastAPI charm with fastapi-tracing-app image.""" - app_name = "fastapi-tracing-k8s" - - resources = { - "app-image": fastapi_tracing_app_image, - } - charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "fastapi") - app = await model.deploy(charm_file, resources=resources, application_name=app_name) - return app - - -@pytest_asyncio.fixture(scope="module", name="go_tracing_app") -async def go_tracing_app_fixture( - pytestconfig: pytest.Config, - ops_test: OpsTest, - tmp_path_factory, - model: Model, - go_tracing_app_image: str, -): - """Build and deploy the Go charm with go-tracing-app image.""" - app_name = "go-tracing-k8s" - - resources = { - "app-image": go_tracing_app_image, - } - charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "go") - app = await model.deploy(charm_file, resources=resources, application_name=app_name) - return app - - async def deploy_and_configure_minio(ops_test: OpsTest, get_unit_ips) -> None: """Deploy and set up minio and s3-integrator needed for s3-like storage backend in the HA charms.""" config = { diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py index d912085..ff32ae3 100644 --- a/tests/integration/integrations/test_tracing.py +++ b/tests/integration/integrations/test_tracing.py @@ -22,10 +22,10 @@ @pytest.mark.parametrize( "tracing_app_fixture, port", [ - ("flask_tracing_app", 8000), - ("django_tracing_app", 8000), - ("fastapi_tracing_app", 8080), - ("go_tracing_app", 8080), + ("flask_app", 8000), + ("django_app", 8000), + ("fastapi_app", 8080), + ("go_app", 8080), ], ) @pytest.mark.skip_juju_version("3.4") # Tempo only supports Juju>=3.4 @@ -51,7 +51,7 @@ async def test_workload_tracing( tracing_app = request.getfixturevalue(tracing_app_fixture) idle_list = [tracing_app.name] - if tracing_app.name != "flask-tracing-k8s": + if tracing_app.name != "flask-k8s": try: postgresql_app = request.getfixturevalue("postgresql_k8s") except Exception as e: From 15536898ea75021921b46a6bfef82d872a85666c Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 20 Jan 2025 08:31:50 +0300 Subject: [PATCH 49/70] Chore(example): Fix go example --- examples/go/go_app/go.mod | 2 +- examples/go/go_app/internal/service/service.go | 2 +- 
examples/go/go_app/main.go | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/go/go_app/go.mod b/examples/go/go_app/go.mod index 5b7da97..d95a61b 100644 --- a/examples/go/go_app/go.mod +++ b/examples/go/go_app/go.mod @@ -1,6 +1,6 @@ // Copyright 2025 Canonical Ltd. // See LICENSE file for licensing details. -module go-tracing-app +module go-app go 1.22.7 diff --git a/examples/go/go_app/internal/service/service.go b/examples/go/go_app/internal/service/service.go index d7d9115..bd4cfd9 100644 --- a/examples/go/go_app/internal/service/service.go +++ b/examples/go/go_app/internal/service/service.go @@ -21,7 +21,7 @@ var lemonsKey = attribute.Key("ex.com/lemons") func SubOperation(ctx context.Context) error { // Using global provider. Alternative is to have application provide a getter // for its component to get the instance of the provider. - tr := otel.Tracer("example.com/go-tracing-app") + tr := otel.Tracer("example.com/go-app") var span trace.Span _, span = tr.Start(ctx, "Sub operation...") diff --git a/examples/go/go_app/main.go b/examples/go/go_app/main.go index efa2601..a430ca1 100644 --- a/examples/go/go_app/main.go +++ b/examples/go/go_app/main.go @@ -7,7 +7,7 @@ import ( "context" "errors" "fmt" - "go-tracing-app/internal/service" + "go-app/internal/service" "io" "log" "os" @@ -96,7 +96,7 @@ func main() { } // Create a named tracer with package path as its name. - tracer := tp.Tracer("example.com/go-tracing-app") + tracer := tp.Tracer("example.com/go-app") defer func() { _ = tp.Shutdown(ctx) }() From d2705ecbf400f5592db05f85f5664cf3abe14415 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 20 Jan 2025 08:33:43 +0300 Subject: [PATCH 50/70] Chore(example): Remove commented out line --- examples/go/go_app/main.go | 8 -------- 1 file changed, 8 deletions(-) diff --git a/examples/go/go_app/main.go b/examples/go/go_app/main.go index a430ca1..56e550b 100644 --- a/examples/go/go_app/main.go +++ b/examples/go/go_app/main.go @@ -64,13 +64,6 @@ func (h mainHandler) servePostgresql(w http.ResponseWriter, r *http.Request) { } } - -// var ( -// fooKey = attribute.Key("ex.com/foo") -// barKey = attribute.Key("ex.com/bar") -// anotherKey = attribute.Key("ex.com/another") -// ) - var tp *sdktrace.TracerProvider // initTracer creates and registers trace provider instance. 
@@ -104,7 +97,6 @@ func main() { ctx, span = tracer.Start(ctx, "operation") defer span.End() span.AddEvent("Nice operation!", trace.WithAttributes(attribute.Int("bogons", 100))) - // span.SetAttributes(anotherKey.String("yes")) if err := service.SubOperation(ctx); err != nil { panic(err) } From ddc57f96bc89a76707b636957ea72c3ef46e8839 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 20 Jan 2025 09:37:12 +0300 Subject: [PATCH 51/70] Chore(test): Fix --- tests/integration/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 4bc8eb4..3605ae0 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -13,6 +13,7 @@ from juju.model import Controller, Model from pytest import Config, FixtureRequest from pytest_operator.plugin import OpsTest +from tests.integration.helpers import inject_charm_config, inject_venv PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent logger = logging.getLogger(__name__) From c64b3fbb0fef53596601b6ef79f71d7aefd1a23e Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 20 Jan 2025 11:20:11 +0300 Subject: [PATCH 52/70] Chore(lint): Lint --- tests/integration/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 3605ae0..afab402 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -13,6 +13,7 @@ from juju.model import Controller, Model from pytest import Config, FixtureRequest from pytest_operator.plugin import OpsTest + from tests.integration.helpers import inject_charm_config, inject_venv PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent From ad85be95ab7d8536c004327cd938eba6fa5d18e9 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Mon, 20 Jan 2025 16:12:37 +0300 Subject: [PATCH 53/70] Chore(template): turned gunicorn conf into jinja template --- requirements.txt | 1 + src/paas_charm/_gunicorn/webserver.py | 57 ++++++++------------ src/paas_charm/templates/gunicorn.conf.py.j2 | 21 ++++++++ tests/unit/flask/test_webserver.py | 7 ++- 4 files changed, 50 insertions(+), 36 deletions(-) create mode 100644 src/paas_charm/templates/gunicorn.conf.py.j2 diff --git a/requirements.txt b/requirements.txt index 38f68e7..f42a1db 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ cosl jsonschema >=4.23,<4.24 ops >= 2.6 pydantic==2.10.3 +Jinja2 diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index d34c2ac..c812f4f 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -8,10 +8,10 @@ import pathlib import shlex import signal -import textwrap import typing from enum import Enum +import jinja2 import ops from ops.pebble import ExecError, PathError @@ -137,7 +137,7 @@ def _config(self) -> str: Returns: The content of the Gunicorn configuration file. 
""" - config_entries = [] + config_entries = {} for setting, setting_value in self._webserver_config.items(): setting_value = typing.cast( None | str | WorkerClassEnum | int | datetime.timedelta, setting_value @@ -146,58 +146,47 @@ def _config(self) -> str: continue if setting_value is None: continue - setting_value = ( + config_entries[setting] = ( setting_value if isinstance(setting_value, (int, str)) else int(setting_value.total_seconds()) ) - config_entries.append(f"{setting} = {setting_value}") if enable_pebble_log_forwarding(): - access_log = "'-'" - error_log = "'-'" + access_log = "-" + error_log = "-" else: - access_log = repr( + access_log = str( APPLICATION_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) - error_log = repr( + error_log = str( APPLICATION_ERROR_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) - config = textwrap.dedent( - f"""\ - bind = ['0.0.0.0:{self._workload_config.port}'] - chdir = {repr(str(self._workload_config.app_dir))} - accesslog = {access_log} - errorlog = {error_log} - statsd_host = {repr(STATSD_HOST)} - """ - ) + framework_environments = None + enable_tracing = False plan = self._container.get_plan().to_dict() services = plan.get("services", None) if services: service_framework = services.get(self._workload_config.framework, None) if service_framework: framework_environments = service_framework.get("environment", None) - if framework_environments and framework_environments.get( "OTEL_EXPORTER_OTLP_ENDPOINT", None ): - config += textwrap.dedent( - """\ - from opentelemetry import trace - from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( - OTLPSpanExporter, - ) - from opentelemetry.sdk.trace import TracerProvider - from opentelemetry.sdk.trace.export import BatchSpanProcessor - - def post_fork(server, worker): - trace.set_tracer_provider(TracerProvider()) - span_processor = BatchSpanProcessor(OTLPSpanExporter()) - trace.get_tracer_provider().add_span_processor(span_processor) - """ - ) - config += "\n".join(config_entries) + enable_tracing = True + + jinja_environment = jinja2.Environment( + loader=jinja2.PackageLoader("paas_charm", "templates"), autoescape=True + ) + config = jinja_environment.get_template("gunicorn.conf.py.j2").render( + workload_port=self._workload_config.port, + workload_app_dir=str(self._workload_config.app_dir), + access_log=access_log, + error_log=error_log, + statsd_host=str(STATSD_HOST), + enable_tracing=enable_tracing, + config_entries=config_entries, + ) return config @property diff --git a/src/paas_charm/templates/gunicorn.conf.py.j2 b/src/paas_charm/templates/gunicorn.conf.py.j2 new file mode 100644 index 0000000..694426c --- /dev/null +++ b/src/paas_charm/templates/gunicorn.conf.py.j2 @@ -0,0 +1,21 @@ +bind = ['0.0.0.0:{{workload_port}}'] +chdir = '{{workload_app_dir}}' +accesslog = '{{access_log}}' +errorlog = '{{error_log}}' +statsd_host = '{{statsd_host}}' +{% if enable_tracing %} +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( + OTLPSpanExporter, +) +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +def post_fork(server, worker): + trace.set_tracer_provider(TracerProvider()) + span_processor = BatchSpanProcessor(OTLPSpanExporter()) + trace.get_tracer_provider().add_span_processor(span_processor) +{% endif %} +{%- for key, value in config_entries.items() -%} + {{ key }} = {{ value }} +{% endfor %} diff --git a/tests/unit/flask/test_webserver.py 
b/tests/unit/flask/test_webserver.py index 5652484..eaccd64 100644 --- a/tests/unit/flask/test_webserver.py +++ b/tests/unit/flask/test_webserver.py @@ -32,7 +32,8 @@ accesslog = '/var/log/flask/access.log' errorlog = '/var/log/flask/error.log' statsd_host = 'localhost:9125' - workers = 10""" + workers = 10 + """ ), id="workers=10", ), @@ -48,7 +49,8 @@ statsd_host = 'localhost:9125' threads = 2 keepalive = 4 - timeout = 3""" + timeout = 3 + """ ), id="threads=2,timeout=3,keepalive=4", ), @@ -62,6 +64,7 @@ accesslog = '/var/log/flask/access.log' errorlog = '/var/log/flask/error.log' statsd_host = 'localhost:9125' + from opentelemetry import trace from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( OTLPSpanExporter, From 7d783d1ad7935979a613ea0d265fab966bacb7fd Mon Sep 17 00:00:00 2001 From: ali ugur Date: Tue, 21 Jan 2025 07:25:32 +0300 Subject: [PATCH 54/70] Chore(test): Updates app fixtures in integrations tests --- tests/integration/conftest.py | 27 ++++++++++++++++--- .../integration/integrations/test_tracing.py | 20 +++----------- 2 files changed, 27 insertions(+), 20 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index afab402..f2527e5 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -9,6 +9,7 @@ import pytest_asyncio from juju.application import Application from juju.client.jujudata import FileJujuData +from juju.errors import JujuError from juju.juju import Juju from juju.model import Controller, Model from pytest import Config, FixtureRequest @@ -104,6 +105,7 @@ async def django_app_fixture( tmp_path_factory, model: Model, django_app_image: str, + postgresql_k8s: Application, ): """Build and deploy the Django charm with django-app image.""" app_name = "django-k8s" @@ -120,6 +122,8 @@ async def django_app_fixture( application_name=app_name, series="jammy", ) + await model.integrate(app_name, postgresql_k8s.name) + await model.wait_for_idle(apps=[app_name, postgresql_k8s.name], status="active", timeout=300) return app @@ -130,6 +134,7 @@ async def fastapi_app_fixture( tmp_path_factory, model: Model, fastapi_app_image: str, + postgresql_k8s: Application, ): """Build and deploy the FastAPI charm with fastapi-app image.""" app_name = "fastapi-k8s" @@ -139,6 +144,8 @@ async def fastapi_app_fixture( } charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "fastapi") app = await model.deploy(charm_file, resources=resources, application_name=app_name) + await model.integrate(app_name, postgresql_k8s.name) + await model.wait_for_idle(apps=[app_name, postgresql_k8s.name], status="active", timeout=300) return app @@ -149,6 +156,7 @@ async def go_app_fixture( tmp_path_factory, model: Model, go_app_image: str, + postgresql_k8s, ): """Build and deploy the Go charm with go-app image.""" app_name = "go-k8s" @@ -158,6 +166,8 @@ async def go_app_fixture( } charm_file = await build_charm_file(pytestconfig, ops_test, tmp_path_factory, "go") app = await model.deploy(charm_file, resources=resources, application_name=app_name) + await model.integrate(app_name, postgresql_k8s.name) + await model.wait_for_idle(apps=[app_name, postgresql_k8s.name], status="active", timeout=300) return app @@ -293,10 +303,19 @@ async def deploy_postgres_fixture(ops_test: OpsTest, model: Model): """Deploy postgres k8s charm.""" _, status, _ = await ops_test.juju("status", "--format", "json") version = json.loads(status)["model"]["version"] - if tuple(map(int, (version.split(".")))) >= (3, 4, 0): - return await 
model.deploy("postgresql-k8s", channel="14/stable", trust=True) - else: - return await model.deploy("postgresql-k8s", channel="14/stable", revision=300, trust=True) + try: + if tuple(map(int, (version.split(".")))) >= (3, 4, 0): + return await model.deploy("postgresql-k8s", channel="14/stable", trust=True) + else: + return await model.deploy( + "postgresql-k8s", channel="14/stable", revision=300, trust=True + ) + except JujuError as e: + if 'cannot add application "postgresql-k8s": application already exists' in e.message: + logger.info("Application 'postgresql-k8s' already exists") + return model.applications["postgresql-k8s"] + else: + raise e @pytest_asyncio.fixture(scope="module", name="redis_k8s_app") diff --git a/tests/integration/integrations/test_tracing.py b/tests/integration/integrations/test_tracing.py index ff32ae3..6d3f162 100644 --- a/tests/integration/integrations/test_tracing.py +++ b/tests/integration/integrations/test_tracing.py @@ -47,29 +47,17 @@ async def test_workload_tracing( tempo_app = await request.getfixturevalue("tempo_app") except Exception as e: logger.info(f"Tempo is already deployed {e}") - + tempo_app = model.applications["tempo"] tracing_app = request.getfixturevalue(tracing_app_fixture) - idle_list = [tracing_app.name] - - if tracing_app.name != "flask-k8s": - try: - postgresql_app = request.getfixturevalue("postgresql_k8s") - except Exception as e: - logger.info(f"Postgres is already deployed {e}") - await model.integrate(tracing_app.name, "postgresql-k8s") - idle_list.append("postgresql-k8s") - await model.wait_for_idle(apps=idle_list, status="active", timeout=300) - - tempo_app_name = "tempo" - await ops_test.model.integrate(f"{tracing_app.name}:tracing", f"{tempo_app_name}:tracing") + await ops_test.model.integrate(f"{tracing_app.name}:tracing", f"{tempo_app.name}:tracing") await ops_test.model.wait_for_idle( - apps=[tracing_app.name, tempo_app_name], status="active", timeout=600 + apps=[tracing_app.name, tempo_app.name], status="active", timeout=600 ) unit_ip = (await get_unit_ips(tracing_app.name))[0] - tempo_host = (await get_unit_ips(tempo_app_name))[0] + tempo_host = (await get_unit_ips(tempo_app.name))[0] async def _fetch_page(session): async with session.get(f"http://{unit_ip}:{port}") as response: From 0cbc108141ea2a1bedfb5529213bc1b8e4f7c067 Mon Sep 17 00:00:00 2001 From: ali ugur Date: Fri, 24 Jan 2025 22:13:24 +0300 Subject: [PATCH 55/70] chore(): Initial --- examples/flask/charmcraft.yaml | 4 + .../lib/charms/smtp_integrator/v0/smtp.py | 373 ++++++++++++++++++ src/paas_charm/app.py | 18 + src/paas_charm/charm.py | 32 ++ src/paas_charm/charm_state.py | 88 +++++ tests/conftest.py | 1 + tests/integration/flask/conftest.py | 76 ++++ tests/integration/flask/test_integrations.py | 43 ++ tox.ini | 2 + 9 files changed, 637 insertions(+) create mode 100644 examples/flask/lib/charms/smtp_integrator/v0/smtp.py diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index e2fea8c..d6ce75c 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -133,6 +133,10 @@ requires: interface: rabbitmq optional: True limit: 1 + smtp: + interface: smtp + optional: True + limit: 1 resources: flask-app-image: diff --git a/examples/flask/lib/charms/smtp_integrator/v0/smtp.py b/examples/flask/lib/charms/smtp_integrator/v0/smtp.py new file mode 100644 index 0000000..d769b1a --- /dev/null +++ b/examples/flask/lib/charms/smtp_integrator/v0/smtp.py @@ -0,0 +1,373 @@ +# Copyright 2025 Canonical Ltd. 
+# Licensed under the Apache2.0. See LICENSE file in charm source for details. + +"""Library to manage the integration with the SMTP Integrator charm. + +This library contains the Requires and Provides classes for handling the integration +between an application and a charm providing the `smtp` and `smtp-legacy` integrations. +If the requirer charm supports secrets, the preferred approach is to use the `smtp` +relation to leverage them. +This library also contains a `SmtpRelationData` class to wrap the SMTP data that will +be shared via the integration. + +### Requirer Charm + +```python + +from charms.smtp_integrator.v0.smtp import SmtpDataAvailableEvent, SmtpRequires + +class SmtpRequirerCharm(ops.CharmBase): + def __init__(self, *args): + super().__init__(*args) + self.smtp = smtp.SmtpRequires(self) + self.framework.observe(self.smtp.on.smtp_data_available, self._handler) + ... + + def _handler(self, events: SmtpDataAvailableEvent) -> None: + ... + +``` + +As shown above, the library provides a custom event to handle the scenario in +which new SMTP data has been added or updated. + +### Provider Charm + +Following the previous example, this is an example of the provider charm. + +```python +from charms.smtp_integrator.v0.smtp import SmtpProvides + +class SmtpProviderCharm(ops.CharmBase): + def __init__(self, *args): + super().__init__(*args) + self.smtp = SmtpProvides(self) + ... + +``` +The SmtpProvides object wraps the list of relations into a `relations` property +and provides an `update_relation_data` method to update the relation data by passing +a `SmtpRelationData` data object. + +```python +class SmtpProviderCharm(ops.CharmBase): + ... + + def _on_config_changed(self, _) -> None: + for relation in self.model.relations[self.smtp.relation_name]: + self.smtp.update_relation_data(relation, self._get_smtp_data()) + +``` +""" + +# The unique Charmhub library identifier, never change it +LIBID = "09583c2f9c1d4c0f9a40244cfc20b0c2" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 14 + +PYDEPS = ["pydantic>=2"] + +# pylint: disable=wrong-import-position +import itertools +import logging +import typing +from ast import literal_eval +from enum import Enum +from typing import Dict, Optional + +import ops +from pydantic import BaseModel, Field, ValidationError + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "smtp" +LEGACY_RELATION_NAME = "smtp-legacy" + + +class TransportSecurity(str, Enum): + """Represent the transport security values. + + Attributes: + NONE: none + STARTTLS: starttls + TLS: tls + """ + + NONE = "none" + STARTTLS = "starttls" + TLS = "tls" + + +class AuthType(str, Enum): + """Represent the auth type values. + + Attributes: + NONE: none + NOT_PROVIDED: not_provided + PLAIN: plain + """ + + NONE = "none" + NOT_PROVIDED = "not_provided" + PLAIN = "plain" + + +class SmtpRelationData(BaseModel): + """Represent the relation data. + + Attributes: + host: The hostname or IP address of the outgoing SMTP relay. + port: The port of the outgoing SMTP relay. + user: The SMTP AUTH user to use for the outgoing SMTP relay. + password: The SMTP AUTH password to use for the outgoing SMTP relay. + password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored. + auth_type: The type used to authenticate with the SMTP relay. 
+ transport_security: The security protocol to use for the outgoing SMTP relay. + domain: The domain used by the emails sent from SMTP relay. + skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. + """ + + host: str = Field(..., min_length=1) + port: int = Field(..., ge=1, le=65536) + user: Optional[str] = None + password: Optional[str] = None + password_id: Optional[str] = None + auth_type: AuthType + transport_security: TransportSecurity + domain: Optional[str] = None + skip_ssl_verify: Optional[bool] = False + + def to_relation_data(self) -> Dict[str, str]: + """Convert an instance of SmtpRelationData to the relation representation. + + Returns: + Dict containing the representation. + """ + result = { + "host": str(self.host), + "port": str(self.port), + "auth_type": self.auth_type.value, + "transport_security": self.transport_security.value, + "skip_ssl_verify": str(self.skip_ssl_verify), + } + if self.domain: + result["domain"] = self.domain + if self.user: + result["user"] = self.user + if self.password: + result["password"] = self.password + if self.password_id: + result["password_id"] = self.password_id + return result + + +class SmtpDataAvailableEvent(ops.RelationEvent): + """Smtp event emitted when relation data has changed. + + Attributes: + host: The hostname or IP address of the outgoing SMTP relay. + port: The port of the outgoing SMTP relay. + user: The SMTP AUTH user to use for the outgoing SMTP relay. + password: The SMTP AUTH password to use for the outgoing SMTP relay. + password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored. + auth_type: The type used to authenticate with the SMTP relay. + transport_security: The security protocol to use for the outgoing SMTP relay. + domain: The domain used by the emails sent from SMTP relay. + skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. 
+ """ + + @property + def host(self) -> str: + """Fetch the SMTP host from the relation.""" + assert self.relation.app + return typing.cast(str, self.relation.data[self.relation.app].get("host")) + + @property + def port(self) -> int: + """Fetch the SMTP port from the relation.""" + assert self.relation.app + return int(typing.cast(str, self.relation.data[self.relation.app].get("port"))) + + @property + def user(self) -> str: + """Fetch the SMTP user from the relation.""" + assert self.relation.app + return typing.cast(str, self.relation.data[self.relation.app].get("user")) + + @property + def password(self) -> str: + """Fetch the SMTP password from the relation.""" + assert self.relation.app + return typing.cast(str, self.relation.data[self.relation.app].get("password")) + + @property + def password_id(self) -> str: + """Fetch the SMTP password from the relation.""" + assert self.relation.app + return typing.cast(str, self.relation.data[self.relation.app].get("password_id")) + + @property + def auth_type(self) -> AuthType: + """Fetch the SMTP auth type from the relation.""" + assert self.relation.app + return AuthType(self.relation.data[self.relation.app].get("auth_type")) + + @property + def transport_security(self) -> TransportSecurity: + """Fetch the SMTP transport security protocol from the relation.""" + assert self.relation.app + return TransportSecurity(self.relation.data[self.relation.app].get("transport_security")) + + @property + def domain(self) -> str: + """Fetch the SMTP domain from the relation.""" + assert self.relation.app + return typing.cast(str, self.relation.data[self.relation.app].get("domain")) + + @property + def skip_ssl_verify(self) -> bool: + """Fetch the skip_ssl_verify flag from the relation.""" + assert self.relation.app + return literal_eval( + typing.cast(str, self.relation.data[self.relation.app].get("skip_ssl_verify")) + ) + + +class SmtpRequiresEvents(ops.CharmEvents): + """SMTP events. + + This class defines the events that a SMTP requirer can emit. + + Attributes: + smtp_data_available: the SmtpDataAvailableEvent. + """ + + smtp_data_available = ops.EventSource(SmtpDataAvailableEvent) + + +class SmtpRequires(ops.Object): + """Requirer side of the SMTP relation. + + Attributes: + on: events the provider can emit. + """ + + on = SmtpRequiresEvents() + + def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None: + """Construct. + + Args: + charm: the provider charm. + relation_name: the relation name. + """ + super().__init__(charm, relation_name) + self.charm = charm + self.relation_name = relation_name + self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed) + + def get_relation_data(self) -> Optional[SmtpRelationData]: + """Retrieve the relation data. + + Returns: + SmtpRelationData: the relation data. + """ + relation = self.model.get_relation(self.relation_name) + return self._get_relation_data_from_relation(relation) if relation else None + + def _get_relation_data_from_relation( + self, relation: ops.Relation + ) -> Optional[SmtpRelationData]: + """Retrieve the relation data. + + Args: + relation: the relation to retrieve the data from. + + Returns: + SmtpRelationData: the relation data. 
+ """ + assert relation.app + relation_data = relation.data[relation.app] + if not relation_data: + return None + return SmtpRelationData( + host=typing.cast(str, relation_data.get("host")), + port=typing.cast(int, relation_data.get("port")), + user=relation_data.get("user"), + password=relation_data.get("password"), + password_id=relation_data.get("password_id"), + auth_type=AuthType(relation_data.get("auth_type")), + transport_security=TransportSecurity(relation_data.get("transport_security")), + domain=relation_data.get("domain"), + skip_ssl_verify=typing.cast(bool, relation_data.get("skip_ssl_verify")), + ) + + def _is_relation_data_valid(self, relation: ops.Relation) -> bool: + """Validate the relation data. + + Args: + relation: the relation to validate. + + Returns: + true: if the relation data is valid. + """ + try: + _ = self._get_relation_data_from_relation(relation) + return True + except ValidationError as ex: + error_fields = set( + itertools.chain.from_iterable(error["loc"] for error in ex.errors()) + ) + error_field_str = " ".join(f"{f}" for f in error_fields) + logger.warning("Error validation the relation data %s", error_field_str) + return False + + def _on_relation_changed(self, event: ops.RelationChangedEvent) -> None: + """Event emitted when the relation has changed. + + Args: + event: event triggering this handler. + """ + assert event.relation.app + relation_data = event.relation.data[event.relation.app] + if relation_data: + if relation_data["auth_type"] == AuthType.NONE.value: + logger.warning('Insecure setting: auth_type has a value "none"') + if relation_data["transport_security"] == TransportSecurity.NONE.value: + logger.warning('Insecure setting: transport_security has value "none"') + if self._is_relation_data_valid(event.relation): + self.on.smtp_data_available.emit(event.relation, app=event.app, unit=event.unit) + + +class SmtpProvides(ops.Object): + """Provider side of the SMTP relation.""" + + def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None: + """Construct. + + Args: + charm: the provider charm. + relation_name: the relation name. + """ + super().__init__(charm, relation_name) + self.charm = charm + self.relation_name = relation_name + + def update_relation_data(self, relation: ops.Relation, smtp_data: SmtpRelationData) -> None: + """Update the relation data. + + Args: + relation: the relation for which to update the data. + smtp_data: a SmtpRelationData instance wrapping the data to be updated. + """ + relation_data = smtp_data.to_relation_data() + if relation_data["auth_type"] == AuthType.NONE.value: + logger.warning('Insecure setting: auth_type has a value "none"') + if relation_data["transport_security"] == TransportSecurity.NONE.value: + logger.warning('Insecure setting: transport_security has value "none"') + relation.data[self.charm.model.app].update(relation_data) diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index f32414a..9c0f635 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -231,6 +231,7 @@ def _app_layer(self) -> ops.pebble.LayerDict: "command" ] = self._alternate_service_command + for service_name, service in services.items(): normalised_service_name = service_name.lower() # Add environment variables to all worker processes. 
@@ -315,6 +316,23 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") - rabbitmq_envvars = _rabbitmq_uri_to_env_variables("RABBITMQ", integrations.rabbitmq_uri) env.update(rabbitmq_envvars) + if integrations.smtp_parameters: + smtp = integrations.smtp_parameters + env.update( + (k, v) + for k, v in ( + ("SMTP_HOST", smtp.host), + ("SMTP_PORT", str(smtp.port)), + ("SMTP_USER", smtp.user), + ("SMTP_PASSWORD", smtp.password), + ("SMTP_PASSWORD_ID", smtp.password_id), + ("SMTP_AUTH_TYPE", smtp.auth_type), + ("SMTP_TRANSPORT_SECURITY", smtp.transport_security), + ("SMTP_DOMAIN", smtp.domain), + ("SMTP_SKIP_SSL_VERIFY", smtp.skip_ssl_verify), + ) + if v is not None + ) return {prefix + k: v for k, v in env.items()} diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index ddb0151..d17d55f 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -44,6 +44,14 @@ "Missing charm library, please run `charmcraft fetch-lib charms.saml_integrator.v0.saml`" ) +try: + # pylint: disable=ungrouped-imports + from charms.smtp_integrator.v0.smtp import SmtpRequires +except ImportError: + logger.exception( + "Missing charm library, please run `charmcraft fetch-lib charms.smtp_integrator.v0.smtp`" + ) + class PaasCharm(abc.ABC, ops.CharmBase): # pylint: disable=too-many-instance-attributes """PaasCharm base charm service mixin. @@ -117,6 +125,12 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: else: self._rabbitmq = None + if "smtp" in requires and requires["smtp"].interface_name == "smtp": + self._smtp = SmtpRequires(self) + self.framework.observe(self._smtp.on.smtp_data_available, self._on_smtp_data_available) + else: + self._smtp = None + self._database_migration = DatabaseMigration( container=self.unit.get_container(self._workload_config.container_name), state_dir=self._workload_config.state_dir, @@ -267,6 +281,7 @@ def is_ready(self) -> bool: return True # Pending to refactor all integrations + # pylint: disable=too-many-branches def _missing_required_integrations(self, charm_state: CharmState) -> list[str]: # noqa: C901 """Get list of missing integrations that are required. 
@@ -297,6 +312,9 @@ def _missing_required_integrations(self, charm_state: CharmState) -> list[str]:
         if self._rabbitmq and not charm_state.integrations.rabbitmq_uri:
             if not requires["rabbitmq"].optional:
                 missing_integrations.append("rabbitmq")
+        if self._smtp and not charm_state.integrations.smtp_parameters:
+            if not requires["smtp"].optional:
+                missing_integrations.append("smtp")
         return missing_integrations
 
     def restart(self, rerun_migrations: bool = False) -> None:
@@ -341,6 +359,14 @@ def _create_charm_state(self) -> CharmState:
         saml_relation_data = None
         if self._saml and (saml_data := self._saml.get_relation_data()):
             saml_relation_data = saml_data.to_relation_data()
+        smtp_relation_data = None
+        if self._smtp and (smtp_data := self._smtp.get_relation_data()):
+            smtp_relation_data = smtp_data.to_relation_data()
+            logger.debug("smtp_relation_data keys: %s", list(smtp_relation_data.keys()))
+            if smtp_relation_data.get("password_id"):
+                secret = self.model.get_secret(id=smtp_relation_data["password_id"])
+                content = secret.get_content()
+                logger.debug("SMTP password secret resolved with %d keys", len(content))
         charm_config = {k: config_get_with_secret(self, k) for k in self.config.keys()}
         config = typing.cast(
             dict,
@@ -360,6 +386,7 @@ def _create_charm_state(self) -> CharmState:
             saml_relation_data=saml_relation_data,
             rabbitmq_uri=self._rabbitmq.rabbitmq_uri() if self._rabbitmq else None,
             base_url=self._base_url,
+            smtp_relation_data=smtp_relation_data,
         )
 
     @property
@@ -473,3 +500,8 @@ def _on_rabbitmq_ready(self, _: ops.HookEvent) -> None:
     def _on_rabbitmq_departed(self, _: ops.HookEvent) -> None:
         """Handle rabbitmq departed event."""
         self.restart()
+
+    @block_if_invalid_config
+    def _on_smtp_data_available(self, _: ops.HookEvent) -> None:
+        """Handle smtp data available event."""
+        self.restart()
diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py
index 6b24203..48bdda3 100644
--- a/src/paas_charm/charm_state.py
+++ b/src/paas_charm/charm_state.py
@@ -7,6 +7,7 @@
 import re
 import typing
 from dataclasses import dataclass, field
+from enum import Enum
 from typing import Optional
 
 from charms.data_platform_libs.v0.data_interfaces import DatabaseRequires
@@ -89,6 +90,7 @@ def from_charm( # pylint: disable=too-many-arguments
         s3_connection_info: dict[str, str] | None = None,
         saml_relation_data: typing.MutableMapping[str, str] | None = None,
         rabbitmq_uri: str | None = None,
+        smtp_relation_data: dict | None = None,
         base_url: str | None = None,
     ) -> "CharmState":
         """Initialize a new instance of the CharmState class from the associated charm.
@@ -103,6 +105,7 @@ def from_charm( # pylint: disable=too-many-arguments
             s3_connection_info: Connection info from S3 lib.
             saml_relation_data: Relation data from the SAML app.
             rabbitmq_uri: RabbitMQ uri.
+            smtp_relation_data: Relation data from the SMTP app.
             base_url: Base URL for the service.
 
         Return:
@@ -123,6 +126,7 @@ def from_charm( # pylint: disable=too-many-arguments
             s3_connection_info=s3_connection_info,
             saml_relation_data=saml_relation_data,
             rabbitmq_uri=rabbitmq_uri,
+            smtp_relation_data=smtp_relation_data,
         )
         return cls(
             framework=framework,
@@ -209,6 +213,7 @@ class IntegrationsState:
         s3_parameters: S3 parameters.
         saml_parameters: SAML parameters.
         rabbitmq_uri: RabbitMQ uri.
+        smtp_parameters: SMTP parameters.
""" redis_uri: str | None = None @@ -216,8 +221,10 @@ class IntegrationsState: s3_parameters: "S3Parameters | None" = None saml_parameters: "SamlParameters | None" = None rabbitmq_uri: str | None = None + smtp_parameters: "SmtpParameters | None" = None # This dataclass combines all the integrations, so it is reasonable that they stay together. + # flake8: noqa: C901 @classmethod def build( # pylint: disable=too-many-arguments cls, @@ -227,6 +234,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, + smtp_relation_data: dict | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -238,6 +246,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: S3 connection info from S3 lib. saml_relation_data: Saml relation data from saml lib. rabbitmq_uri: RabbitMQ uri. + smtp_relation_data: smtp relation data from smtp lib. Return: The IntegrationsState instance created. @@ -276,6 +285,17 @@ def build( # pylint: disable=too-many-arguments if redis_uri is not None and re.fullmatch(r"redis://[^:/]+:None", redis_uri): redis_uri = None + if smtp_relation_data is not None: + try: + smtp_parameters = SmtpParameters(**smtp_relation_data) + except ValidationError as exc: + error_message = build_validation_error_message(exc) + raise CharmConfigInvalidError( + f"Invalid Smtp configuration: {error_message}" + ) from exc + else: + smtp_parameters = None + return cls( redis_uri=redis_uri, databases_uris={ @@ -286,6 +306,7 @@ def build( # pylint: disable=too-many-arguments s3_parameters=s3_parameters, saml_parameters=saml_parameters, rabbitmq_uri=rabbitmq_uri, + smtp_parameters=smtp_parameters, ) @@ -364,3 +385,70 @@ def validate_signing_certificate_exists(cls, certs: str, _: ValidationInfo) -> s if not certificate: raise ValueError("Missing x509certs. There should be at least one certificate.") return certificate + + +class TransportSecurity(str, Enum): + """Represent the transport security values. + + Attributes: + NONE: none + STARTTLS: starttls + TLS: tls + """ + + NONE = "none" + STARTTLS = "starttls" + TLS = "tls" + + +class AuthType(str, Enum): + """Represent the auth type values. + + Attributes: + NONE: none + NOT_PROVIDED: not_provided + PLAIN: plain + """ + + NONE = "none" + NOT_PROVIDED = "not_provided" + PLAIN = "plain" + + +class SmtpParameters(BaseModel, extra=Extra.allow): + """Represent the SMTP relation data. + + Attributes: + host: The hostname or IP address of the outgoing SMTP relay. + port: The port of the outgoing SMTP relay. + user: The SMTP AUTH user to use for the outgoing SMTP relay. + password: The SMTP AUTH password to use for the outgoing SMTP relay. + password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored. + auth_type: The type used to authenticate with the SMTP relay. + transport_security: The security protocol to use for the outgoing SMTP relay. + domain: The domain used by the emails sent from SMTP relay. + skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. 
+    """
+
+    host: str = Field(..., min_length=1)
+    port: int = Field(..., ge=1, le=65535)
+    user: str | None = None
+    password: str | None = None
+    password_id: str | None = None
+    auth_type: AuthType | None = None
+    transport_security: TransportSecurity | None = None
+    domain: str | None = None
+    skip_ssl_verify: bool | None = False
+
+    @field_validator("auth_type")
+    @classmethod
+    def validate_auth_type(cls, auth_type: str, _: ValidationInfo) -> str | None:
+        """Normalise the insecure "none" auth type to None."""
+        if auth_type == AuthType.NONE:
+            return None
+        return auth_type
+
+    @field_validator("transport_security")
+    @classmethod
+    def validate_transport_security(cls, transport_security: str, _: ValidationInfo) -> str | None:
+        """Normalise the insecure "none" transport security to None."""
+        if transport_security == TransportSecurity.NONE:
+            return None
+        return transport_security
diff --git a/tests/conftest.py b/tests/conftest.py
index 615192d..d6fd748 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -15,3 +15,4 @@ def pytest_addoption(parser):
     parser.addoption("--fastapi-app-image", action="store")
     parser.addoption("--go-app-image", action="store")
     parser.addoption("--localstack-address", action="store")
+    parser.addoption("--kube-config", action="store")
diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py
index 9d56d29..9d54b9e 100644
--- a/tests/integration/flask/conftest.py
+++ b/tests/integration/flask/conftest.py
@@ -3,11 +3,15 @@
 
 """Fixtures for flask charm integration tests."""
 
+import collections
+import logging
 import os
 import pathlib
+import time
 from secrets import token_hex
 
 import boto3
+import kubernetes
 import pytest
 import pytest_asyncio
 from botocore.config import Config as BotoConfig
@@ -21,6 +25,9 @@
 
 PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent.parent
 
+logger = logging.getLogger(__name__)
+
+
 @pytest.fixture(autouse=True)
 def cwd():
     return os.chdir(PROJECT_ROOT / "examples/flask")
@@ -348,3 +355,72 @@ async def rabbitmq_k8s_integration_fixture(
 
     await flask_app.destroy_relation("rabbitmq", f"{rabbitmq_k8s_app.name}:amqp")
     await model.wait_for_idle(apps=[flask_app.name, rabbitmq_k8s_app.name], status="active")
+
+
+@pytest.fixture(scope="module", name="load_kube_config")
+def load_kube_config_fixture(pytestconfig: pytest.Config):
+    """Load kubernetes config file."""
+    kube_config = pytestconfig.getoption("--kube-config")
+    kubernetes.config.load_kube_config(config_file=kube_config)
+
+
+@pytest.fixture(scope="module")
+def mailcatcher(load_kube_config, ops_test: OpsTest):
+    """Deploy test mailcatcher service."""
+    assert ops_test.model
+    namespace = ops_test.model.name
+    v1 = kubernetes.client.CoreV1Api()
+    pod = kubernetes.client.V1Pod(
+        api_version="v1",
+        kind="Pod",
+        metadata=kubernetes.client.V1ObjectMeta(
+            name="mailcatcher",
+            namespace=namespace,
+            labels={"app.kubernetes.io/name": "mailcatcher"},
+        ),
+        spec=kubernetes.client.V1PodSpec(
+            containers=[
+                kubernetes.client.V1Container(
+                    name="mailcatcher",
+                    image="sj26/mailcatcher",
+                    ports=[
+                        kubernetes.client.V1ContainerPort(container_port=1025),
+                        kubernetes.client.V1ContainerPort(container_port=1080),
+                    ],
+                )
+            ],
+        ),
+    )
+    v1.create_namespaced_pod(namespace=namespace, body=pod)
+    service = kubernetes.client.V1Service(
+        api_version="v1",
+        kind="Service",
+        metadata=kubernetes.client.V1ObjectMeta(name="mailcatcher-service", namespace=namespace),
+        spec=kubernetes.client.V1ServiceSpec(
+            type="ClusterIP",
+            ports=[
+                kubernetes.client.V1ServicePort(port=1025, target_port=1025, name="tcp-1025"),
+                kubernetes.client.V1ServicePort(port=1080, target_port=1080, name="tcp-1080"),
+            ],
+            selector={"app.kubernetes.io/name": "mailcatcher"},
+        ),
+    )
+    v1.create_namespaced_service(namespace=namespace, body=service)
+    deadline = time.time() + 300
+    while True:
+        if time.time() > deadline:
+            raise TimeoutError("timeout while waiting for mailcatcher pod")
+        try:
+            pod = v1.read_namespaced_pod(name="mailcatcher", namespace=namespace)
+            if pod.status.phase == "Running":
+                logger.info("mailcatcher running at %s", pod.status.pod_ip)
+                break
+        except kubernetes.client.ApiException:
+            pass
+        logger.info("waiting for mailcatcher pod")
+        time.sleep(1)
+    SmtpCredential = collections.namedtuple("SmtpCredential", "host port")
+    return SmtpCredential(
+        host=f"mailcatcher-service.{namespace}.svc.cluster.local",
+        port=1025,
+    )
diff --git a/tests/integration/flask/test_integrations.py b/tests/integration/flask/test_integrations.py
index ff7b1e5..5d467e8 100644
--- a/tests/integration/flask/test_integrations.py
+++ b/tests/integration/flask/test_integrations.py
@@ -167,3 +167,46 @@ async def test_saml_integration(
         entity_id_url._replace(path="sso")
     )
     assert env["SAML_SIGNING_CERTIFICATE"] in saml_helper.CERTIFICATE.replace("\n", "")
+
+
+async def test_smtp_integration(
+    ops_test: OpsTest,
+    flask_app: Application,
+    model: Model,
+    get_unit_ips,
+    mailcatcher,
+):
+    """
+    arrange: build and deploy the flask charm. Deploy the mailcatcher pod and service.
+    act: Integrate the charm with the smtp-integrator.
+    assert: the flask application should return in the endpoint /env
+        the correct SMTP env variables.
+    """
+    smtp_config = {
+        "auth_type": "none",
+        "domain": "example.com",
+        "host": mailcatcher.host,
+        "port": mailcatcher.port,
+    }
+    smtp_integrator_app = await model.deploy(
+        "smtp-integrator",
+        channel="latest/edge",
+        config=smtp_config,
+    )
+    await model.wait_for_idle()
+    await model.add_relation(flask_app.name, f"{smtp_integrator_app.name}:smtp")
+
+    await model.wait_for_idle(
+        idle_period=30,
+        apps=[flask_app.name, smtp_integrator_app.name],
+        status="active",
+    )
+
+    for unit_ip in await get_unit_ips(flask_app.name):
+        response = requests.get(f"http://{unit_ip}:8000/env", timeout=5)
+        assert response.status_code == 200
+        env = response.json()
+        assert env["SMTP_HOST"] == smtp_config["host"]
+        assert env["SMTP_DOMAIN"] == smtp_config["domain"]
+        assert env["SMTP_PORT"] == smtp_config["port"]
+        assert env.get("SMTP_AUTH_TYPE") is None
diff --git a/tox.ini b/tox.ini
index 9f1dd56..d7b9b2c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -66,6 +66,8 @@ commands =
         --skip {toxinidir}/.mypy_cache \
         --skip {toxinidir}/examples/django/charm/lib \
         --skip {toxinidir}/examples/flask/lib \
+        --skip {toxinidir}/examples/fastapi/charm/lib \
+        --skip {toxinidir}/examples/go/charm/lib \
         --skip {toxinidir}/tests/integration/.mypy_cache
     # pflake8 wrapper supports config from pyproject.toml
     pflake8 {[vars]src_path} {[vars]legacy_src_path}

From efc08bf6e7f18e07f86285657aef0e5be63e4462 Mon Sep 17 00:00:00 2001
From: ali ugur
Date: Fri, 24 Jan 2025 22:14:24 +0300
Subject: [PATCH 56/70] chore(): Needs fixing in unit tests

---
 app.py                                       | 19 -------------
 .../tempo_coordinator_k8s/v0/tracing.py      | 10 ++++---
 src/paas_charm/_gunicorn/charm.py            |  4 ++-
 src/paas_charm/_gunicorn/webserver.py        | 15 +----------
 src/paas_charm/_gunicorn/workload_config.py  |  6 ++++-
 src/paas_charm/app.py                        |  2 ++
 src/paas_charm/templates/gunicorn.conf.py.j2 | 27 ++++++++++---------
 tox.ini                                      |  2 ++
 8 files changed, 33 insertions(+), 52 deletions(-)
 delete mode 100644 app.py

diff --git a/app.py b/app.py
deleted file mode 100644
index eb52333..0000000
--- a/app.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#
Copyright 2025 Canonical Ltd. -# See LICENSE file for licensing details. - -from flask import Flask -import time - -app = Flask(__name__) - - -@app.route("/") -def index(): - return "Hello, world!" - -@app.route("/io") -def io_bound_task(): - start_time = time.time() - time.sleep(2) - duration = time.time() - start_time - return f"I/O task completed in {round(duration, 2)} seconds" diff --git a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py index 363828d..e64c468 100644 --- a/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py +++ b/examples/flask/lib/charms/tempo_coordinator_k8s/v0/tracing.py @@ -104,7 +104,7 @@ def __init__(self, *args): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 3 +LIBPATCH = 4 PYDEPS = ["pydantic"] @@ -885,13 +885,15 @@ def _get_endpoint( filter(lambda i: i.protocol.name == protocol, app_data.receivers) ) if not receivers: - logger.error(f"no receiver found with protocol={protocol!r}") + # it can happen if the charm requests tracing protocols, but the relay (such as grafana-agent) isn't yet + # connected to the tracing backend. In this case, it's not an error the charm author can do anything about + logger.warning(f"no receiver found with protocol={protocol!r}.") return if len(receivers) > 1: - logger.error( + # if we have more than 1 receiver that matches, it shouldn't matter which receiver we'll be using. + logger.warning( f"too many receivers with protocol={protocol!r}; using first one. Found: {receivers}" ) - return receiver = receivers[0] return receiver.url diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index a7d41bf..9f0a424 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -24,7 +24,9 @@ class GunicornBase(PaasCharm): def _workload_config(self) -> WorkloadConfig: """Return a WorkloadConfig instance.""" return create_workload_config( - framework_name=self._framework_name, unit_name=self.unit.name + framework_name=self._framework_name, + unit_name=self.unit.name, + tracing_enabled=bool(self._tracing), ) def create_webserver_config(self) -> WebserverConfig: diff --git a/src/paas_charm/_gunicorn/webserver.py b/src/paas_charm/_gunicorn/webserver.py index c812f4f..3a93635 100644 --- a/src/paas_charm/_gunicorn/webserver.py +++ b/src/paas_charm/_gunicorn/webserver.py @@ -162,19 +162,6 @@ def _config(self) -> str: APPLICATION_ERROR_LOG_FILE_FMT.format(framework=self._workload_config.framework) ) - framework_environments = None - enable_tracing = False - plan = self._container.get_plan().to_dict() - services = plan.get("services", None) - if services: - service_framework = services.get(self._workload_config.framework, None) - if service_framework: - framework_environments = service_framework.get("environment", None) - if framework_environments and framework_environments.get( - "OTEL_EXPORTER_OTLP_ENDPOINT", None - ): - enable_tracing = True - jinja_environment = jinja2.Environment( loader=jinja2.PackageLoader("paas_charm", "templates"), autoescape=True ) @@ -184,7 +171,7 @@ def _config(self) -> str: access_log=access_log, error_log=error_log, statsd_host=str(STATSD_HOST), - enable_tracing=enable_tracing, + enable_tracing=self._workload_config.tracing_enabled, config_entries=config_entries, ) return config diff --git a/src/paas_charm/_gunicorn/workload_config.py b/src/paas_charm/_gunicorn/workload_config.py index 
b9d007a..388c369 100644 --- a/src/paas_charm/_gunicorn/workload_config.py +++ b/src/paas_charm/_gunicorn/workload_config.py @@ -12,12 +12,15 @@ APPLICATION_ERROR_LOG_FILE_FMT = "/var/log/{framework}/error.log" -def create_workload_config(framework_name: str, unit_name: str) -> WorkloadConfig: +def create_workload_config( + framework_name: str, unit_name: str, tracing_enabled: bool = False +) -> WorkloadConfig: """Create an WorkloadConfig for Gunicorn. Args: framework_name: framework name. unit_name: name of the app unit. + tracing_enabled: if True, tracing is enabled. Returns: new WorkloadConfig @@ -37,4 +40,5 @@ def create_workload_config(framework_name: str, unit_name: str) -> WorkloadConfi ], metrics_target="*:9102", unit_name=unit_name, + tracing_enabled=tracing_enabled, ) diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index 499e64b..2319093 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -41,6 +41,7 @@ class WorkloadConfig: # pylint: disable=too-many-instance-attributes metrics_target: target to scrape for metrics. metrics_path: path to scrape for metrics. unit_name: Name of the unit. Needed to know if schedulers should run here. + tracing_enabled: True if tracing should be enabled. """ framework: str @@ -56,6 +57,7 @@ class WorkloadConfig: # pylint: disable=too-many-instance-attributes metrics_target: str | None = None metrics_path: str | None = "/metrics" unit_name: str + tracing_enabled: bool = False def should_run_scheduler(self) -> bool: """Return if the unit should run scheduler processes. diff --git a/src/paas_charm/templates/gunicorn.conf.py.j2 b/src/paas_charm/templates/gunicorn.conf.py.j2 index 694426c..6c9c586 100644 --- a/src/paas_charm/templates/gunicorn.conf.py.j2 +++ b/src/paas_charm/templates/gunicorn.conf.py.j2 @@ -1,21 +1,22 @@ -bind = ['0.0.0.0:{{workload_port}}'] -chdir = '{{workload_app_dir}}' -accesslog = '{{access_log}}' -errorlog = '{{error_log}}' -statsd_host = '{{statsd_host}}' -{% if enable_tracing %} +{%- if enable_tracing -%} from opentelemetry import trace -from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( - OTLPSpanExporter, -) +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor +{%- endif -%} +bind = ['0.0.0.0:{{ workload_port }}'] +chdir = '{{ workload_app_dir }}' +accesslog = '{{ access_log }}' +errorlog = '{{ error_log }}' +statsd_host = '{{ statsd_host }}' +{%- for key, value in config_entries.items() %} +{{ key }} = {{ value }} +{%- endfor -%} +{%- if enable_tracing -%} + def post_fork(server, worker): trace.set_tracer_provider(TracerProvider()) span_processor = BatchSpanProcessor(OTLPSpanExporter()) trace.get_tracer_provider().add_span_processor(span_processor) -{% endif %} -{%- for key, value in config_entries.items() -%} - {{ key }} = {{ value }} -{% endfor %} +{%- endif -%} diff --git a/tox.ini b/tox.ini index c51bc2d..f057a74 100644 --- a/tox.ini +++ b/tox.ini @@ -65,6 +65,8 @@ commands = --skip {toxinidir}/venv \ --skip {toxinidir}/.mypy_cache \ --skip {toxinidir}/examples/django/charm/lib \ + --skip {toxinidir}/examples/fastapi/charm/lib \ + --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/examples/flask/lib \ --skip {toxinidir}/tests/integration/.mypy_cache # pflake8 wrapper supports config from pyproject.toml From dd52794697dfba9063e7a5617aaed03f33939b72 Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Mon, 27 Jan 2025 08:21:50 +0300 
Subject: [PATCH 57/70] chore(test): Test --- tests/integration/flask/test_charm.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/flask/test_charm.py b/tests/integration/flask/test_charm.py index 6019ea6..e030529 100644 --- a/tests/integration/flask/test_charm.py +++ b/tests/integration/flask/test_charm.py @@ -41,6 +41,7 @@ async def test_flask_is_up( @pytest.mark.parametrize( "update_config, timeout", [ + pytest.param({"webserver-timeout": 3}, 3, id="timeout=9"), pytest.param({"webserver-timeout": 7}, 7, id="timeout=7"), pytest.param({"webserver-timeout": 5}, 5, id="timeout=5"), pytest.param({"webserver-timeout": 3}, 3, id="timeout=3"), From 4e7d8107104facd4332f2479125209f442c7d153 Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Mon, 27 Jan 2025 08:23:01 +0300 Subject: [PATCH 58/70] Chore(test): Fixed unit tests. --- src/paas_charm/templates/gunicorn.conf.py.j2 | 7 +++-- tests/unit/flask/test_webserver.py | 31 +++++++++----------- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/src/paas_charm/templates/gunicorn.conf.py.j2 b/src/paas_charm/templates/gunicorn.conf.py.j2 index 6c9c586..cbd8d42 100644 --- a/src/paas_charm/templates/gunicorn.conf.py.j2 +++ b/src/paas_charm/templates/gunicorn.conf.py.j2 @@ -3,7 +3,8 @@ from opentelemetry import trace from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor -{%- endif -%} + +{% endif -%} bind = ['0.0.0.0:{{ workload_port }}'] chdir = '{{ workload_app_dir }}' accesslog = '{{ access_log }}' @@ -12,11 +13,11 @@ statsd_host = '{{ statsd_host }}' {%- for key, value in config_entries.items() %} {{ key }} = {{ value }} {%- endfor -%} -{%- if enable_tracing -%} +{%- if enable_tracing %} def post_fork(server, worker): trace.set_tracer_provider(TracerProvider()) span_processor = BatchSpanProcessor(OTLPSpanExporter()) trace.get_tracer_provider().add_span_processor(span_processor) -{%- endif -%} +{% endif -%} diff --git a/tests/unit/flask/test_webserver.py b/tests/unit/flask/test_webserver.py index eaccd64..059609d 100644 --- a/tests/unit/flask/test_webserver.py +++ b/tests/unit/flask/test_webserver.py @@ -24,7 +24,7 @@ GUNICORN_CONFIG_TEST_PARAMS = [ pytest.param( {"workers": 10}, - DEFAULT_LAYER, + False, textwrap.dedent( f"""\ bind = ['0.0.0.0:8000'] @@ -32,14 +32,13 @@ accesslog = '/var/log/flask/access.log' errorlog = '/var/log/flask/error.log' statsd_host = 'localhost:9125' - workers = 10 - """ + workers = 10""" ), id="workers=10", ), pytest.param( {"threads": 2, "timeout": 3, "keepalive": 4}, - DEFAULT_LAYER, + False, textwrap.dedent( f"""\ bind = ['0.0.0.0:8000'] @@ -49,28 +48,26 @@ statsd_host = 'localhost:9125' threads = 2 keepalive = 4 - timeout = 3 - """ + timeout = 3""" ), id="threads=2,timeout=3,keepalive=4", ), pytest.param( {}, - LAYER_WITH_TRACING, + True, textwrap.dedent( f"""\ + from opentelemetry import trace + from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import BatchSpanProcessor + bind = ['0.0.0.0:8000'] chdir = '/flask/app' accesslog = '/var/log/flask/access.log' errorlog = '/var/log/flask/error.log' statsd_host = 'localhost:9125' - from opentelemetry import trace - from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( - OTLPSpanExporter, - ) - from opentelemetry.sdk.trace import TracerProvider - from 
opentelemetry.sdk.trace.export import BatchSpanProcessor def post_fork(server, worker): trace.set_tracer_provider(TracerProvider()) @@ -83,11 +80,11 @@ def post_fork(server, worker): ] -@pytest.mark.parametrize("charm_state_params, layer, config_file", GUNICORN_CONFIG_TEST_PARAMS) +@pytest.mark.parametrize("charm_state_params, tracing_enabled, config_file", GUNICORN_CONFIG_TEST_PARAMS) def test_gunicorn_config( harness: Harness, charm_state_params, - layer, + tracing_enabled, config_file, database_migration_mock, ) -> None: @@ -100,14 +97,14 @@ def test_gunicorn_config( harness.begin() container: ops.Container = harness.model.unit.get_container(FLASK_CONTAINER_NAME) harness.set_can_connect(FLASK_CONTAINER_NAME, True) - container.add_layer("default", layer) + container.add_layer("default", DEFAULT_LAYER) charm_state = CharmState( framework="flask", secret_key="", is_secret_storage_ready=True, ) - workload_config = create_workload_config(framework_name="flask", unit_name="flask/0") + workload_config = create_workload_config(framework_name="flask", unit_name="flask/0", tracing_enabled=tracing_enabled) webserver_config = WebserverConfig(**charm_state_params) webserver = GunicornWebserver( webserver_config=webserver_config, From 2b4923b163c5decb14584ab031c9971328e3097d Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Mon, 27 Jan 2025 09:48:15 +0300 Subject: [PATCH 59/70] Chore(): Address comments. --- src/paas_charm/app.py | 6 ++-- src/paas_charm/charm.py | 10 ++----- src/paas_charm/charm_state.py | 46 ++++++++++++++++++++++++------ tests/unit/flask/test_webserver.py | 8 ++++-- 4 files changed, 49 insertions(+), 21 deletions(-) diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index 2319093..4d7a803 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -278,10 +278,10 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") - for interface_name, uri in integrations.databases_uris.items(): interface_envvars = _db_url_to_env_variables(interface_name.upper(), uri) env.update(interface_envvars) - if integrations.tracing_relation_data: - if service_name := integrations.tracing_relation_data.service_name: + if integrations.tempo_parameters: + if service_name := integrations.tempo_parameters.service_name: env.update({"OTEL_SERVICE_NAME": service_name}) - if endpoint := integrations.tracing_relation_data.endpoint: + if endpoint := integrations.tempo_parameters.endpoint: env.update({"OTEL_EXPORTER_OTLP_ENDPOINT": endpoint}) if integrations.s3_parameters: diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index c75988d..a1f323a 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -415,7 +415,7 @@ def _missing_required_other_integrations( if self._saml and not charm_state.integrations.saml_parameters: if not requires["saml"].optional: yield "saml" - if self._tracing and not charm_state.integrations.tracing_relation_data: + if self._tracing and not charm_state.integrations.tempo_parameters: if not requires["tracing"].optional: yield "tracing" @@ -482,12 +482,6 @@ def _create_charm_state(self) -> CharmState: for k, v in charm_config.items() }, ) - tracing_relation_data = None - if self._tracing and self._tracing.is_ready(): - tracing_relation_data = TempoParameters( - endpoint=f'{self._tracing.get_endpoint(protocol="otlp_http")}', - service_name=self.app.name, - ) return CharmState.from_charm( config=config, framework=self._framework_name, @@ -498,7 +492,7 @@ def _create_charm_state(self) -> CharmState: 
s3_connection_info=self._s3.get_s3_connection_info() if self._s3 else None, saml_relation_data=saml_relation_data, rabbitmq_uri=self._rabbitmq.rabbitmq_uri() if self._rabbitmq else None, - tracing_relation_data=tracing_relation_data, + tempo_parameters=TempoParameters.from_charm(name=self.app.name, tracing=self._tracing), base_url=self._base_url, ) diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 6c17fea..7b5dae0 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -19,6 +19,15 @@ logger = logging.getLogger(__name__) +try: + # pylint: disable=ungrouped-imports + from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer +except ImportError: + logger.exception( + "Missing charm library, please run " + "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" + ) + class TempoParameters(BaseModel): """Configuration for accessing Tempo service. @@ -31,6 +40,27 @@ class TempoParameters(BaseModel): endpoint: str | None = None service_name: str | None = None + @classmethod + def from_charm( + cls, *, name: str, tracing: TracingEndpointRequirer | None + ) -> "TempoParameters | None": + """Initialize a new instance of the TempoParameters class from the associated charm. + + Args: + name: Name of the tracing service. + tracing: The tracing integration object. + + Return: + The TempoParameters instance created by the provided charm if + Tempo is relation is ready. + """ + if tracing and tracing.is_ready(): + return cls( + endpoint=f"{tracing.get_endpoint(protocol="otlp_http")}", + service_name=name, + ) + return None + class S3Parameters(BaseModel): """Configuration for accessing S3 bucket. @@ -178,7 +208,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None = None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tracing_relation_data: TempoParameters | None = None, + tempo_parameters: TempoParameters | None = None, base_url: str | None = None, ) -> "CharmState": """Initialize a new instance of the CharmState class from the associated charm. @@ -193,7 +223,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: Connection info from S3 lib. saml_relation_data: Relation data from the SAML app. rabbitmq_uri: RabbitMQ uri. - tracing_relation_data: The tracing uri provided by the Tempo coordinator charm + tempo_parameters: The tracing uri provided by the Tempo coordinator charm and charm name. base_url: Base URL for the service. @@ -215,7 +245,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info=s3_connection_info, saml_relation_data=saml_relation_data, rabbitmq_uri=rabbitmq_uri, - tracing_relation_data=tracing_relation_data, + tempo_parameters=tempo_parameters, ) return cls( framework=framework, @@ -302,7 +332,7 @@ class IntegrationsState: s3_parameters: S3 parameters. saml_parameters: SAML parameters. rabbitmq_uri: RabbitMQ uri. - tracing_relation_data: The tracing uri provided by the Tempo coordinator charm + tempo_parameters: The tracing uri provided by the Tempo coordinator charm and charm name. """ @@ -311,7 +341,7 @@ class IntegrationsState: s3_parameters: S3Parameters | None = None saml_parameters: SamlParameters | None = None rabbitmq_uri: str | None = None - tracing_relation_data: TempoParameters | None = None + tempo_parameters: TempoParameters | None = None # This dataclass combines all the integrations, so it is reasonable that they stay together. 
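
Aside (illustration only, not part of the patch): `TempoParameters` above ends up as the OTEL_SERVICE_NAME and OTEL_EXPORTER_OTLP_ENDPOINT environment variables via `map_integrations_to_env`. A minimal sketch of how a workload process could pick those up with the OpenTelemetry SDK, mirroring the post_fork hook in gunicorn.conf.py.j2 (assuming the opentelemetry-sdk and OTLP HTTP exporter packages are installed):

    from opentelemetry import trace
    from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor

    # TracerProvider() builds its Resource from OTEL_SERVICE_NAME, and OTLPSpanExporter()
    # reads OTEL_EXPORTER_OTLP_ENDPOINT, so the injected environment is enough configuration.
    trace.set_tracer_provider(TracerProvider())
    trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))

    tracer = trace.get_tracer(__name__)
    with tracer.start_as_current_span("example-span"):
        pass  # application work would happen here
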
@classmethod @@ -323,7 +353,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tracing_relation_data: TempoParameters | None = None, + tempo_parameters: TempoParameters | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -335,7 +365,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: S3 connection info from S3 lib. saml_relation_data: Saml relation data from saml lib. rabbitmq_uri: RabbitMQ uri. - tracing_relation_data: The tracing uri provided by the Tempo coordinator charm + tempo_parameters: The tracing uri provided by the Tempo coordinator charm and charm name. Return: @@ -384,5 +414,5 @@ def build( # pylint: disable=too-many-arguments s3_parameters=s3_parameters, saml_parameters=saml_parameters, rabbitmq_uri=rabbitmq_uri, - tracing_relation_data=tracing_relation_data, + tempo_parameters=tempo_parameters, ) diff --git a/tests/unit/flask/test_webserver.py b/tests/unit/flask/test_webserver.py index 059609d..0d58ed4 100644 --- a/tests/unit/flask/test_webserver.py +++ b/tests/unit/flask/test_webserver.py @@ -80,7 +80,9 @@ def post_fork(server, worker): ] -@pytest.mark.parametrize("charm_state_params, tracing_enabled, config_file", GUNICORN_CONFIG_TEST_PARAMS) +@pytest.mark.parametrize( + "charm_state_params, tracing_enabled, config_file", GUNICORN_CONFIG_TEST_PARAMS +) def test_gunicorn_config( harness: Harness, charm_state_params, @@ -104,7 +106,9 @@ def test_gunicorn_config( secret_key="", is_secret_storage_ready=True, ) - workload_config = create_workload_config(framework_name="flask", unit_name="flask/0", tracing_enabled=tracing_enabled) + workload_config = create_workload_config( + framework_name="flask", unit_name="flask/0", tracing_enabled=tracing_enabled + ) webserver_config = WebserverConfig(**charm_state_params) webserver = GunicornWebserver( webserver_config=webserver_config, From 1c0a046dd2c2b0819e6b9ed9740eac353053a5c2 Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Mon, 27 Jan 2025 12:22:54 +0300 Subject: [PATCH 60/70] fix(): Fix ' --- src/paas_charm/charm_state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 7b5dae0..637bd14 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -56,7 +56,7 @@ def from_charm( """ if tracing and tracing.is_ready(): return cls( - endpoint=f"{tracing.get_endpoint(protocol="otlp_http")}", + endpoint=f"{tracing.get_endpoint(protocol='otlp_http')}", service_name=name, ) return None From 5c86c234e6ea549219b892c0434d88c185d264ea Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Tue, 28 Jan 2025 07:03:17 +0300 Subject: [PATCH 61/70] Fix(config): config error fixed when tracing relation is not there --- src/paas_charm/_gunicorn/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/paas_charm/_gunicorn/charm.py b/src/paas_charm/_gunicorn/charm.py index 9f0a424..9e4809e 100644 --- a/src/paas_charm/_gunicorn/charm.py +++ b/src/paas_charm/_gunicorn/charm.py @@ -26,7 +26,7 @@ def _workload_config(self) -> WorkloadConfig: return create_workload_config( framework_name=self._framework_name, unit_name=self.unit.name, - tracing_enabled=bool(self._tracing), + tracing_enabled=bool(self._tracing and self._tracing.is_ready()), ) def create_webserver_config(self) -> WebserverConfig: From 
8eeb7a3011fc573b0fb660d8a830236a236abdeb Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Tue, 28 Jan 2025 09:53:37 +0300 Subject: [PATCH 62/70] Chore(): Revert minor changes. --- .../lib/charms/traefik_k8s/v2/ingress.py | 27 +- .../data_platform_libs/v0/data_interfaces.py | 530 ++++-------------- requirements.txt | 2 +- 3 files changed, 130 insertions(+), 429 deletions(-) diff --git a/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py b/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py index 5fb2cae..582a31f 100644 --- a/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py +++ b/examples/fastapi/charm/lib/charms/traefik_k8s/v2/ingress.py @@ -56,14 +56,13 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): import socket import typing from dataclasses import dataclass -from functools import partial from typing import Any, Callable, Dict, List, MutableMapping, Optional, Sequence, Tuple, Union import pydantic from ops.charm import CharmBase, RelationBrokenEvent, RelationEvent from ops.framework import EventSource, Object, ObjectEvents, StoredState from ops.model import ModelError, Relation, Unit -from pydantic import AnyHttpUrl, BaseModel, Field +from pydantic import AnyHttpUrl, BaseModel, Field, validator # The unique Charmhub library identifier, never change it LIBID = "e6de2a5cd5b34422a204668f3b8f90d2" @@ -73,7 +72,7 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 14 +LIBPATCH = 13 PYDEPS = ["pydantic"] @@ -85,9 +84,6 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): PYDANTIC_IS_V1 = int(pydantic.version.VERSION.split(".")[0]) < 2 if PYDANTIC_IS_V1: - from pydantic import validator - - input_validator = partial(validator, pre=True) class DatabagModel(BaseModel): # type: ignore """Base databag model.""" @@ -147,9 +143,7 @@ def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): return databag else: - from pydantic import ConfigDict, field_validator - - input_validator = partial(field_validator, mode="before") + from pydantic import ConfigDict class DatabagModel(BaseModel): """Base databag model.""" @@ -177,7 +171,7 @@ def load(cls, databag: MutableMapping): k: json.loads(v) for k, v in databag.items() # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.model_fields.items()} # type: ignore + if k in {(f.alias or n) for n, f in cls.__fields__.items()} # type: ignore } except json.JSONDecodeError as e: msg = f"invalid databag contents: expecting json. 
{databag}" @@ -258,14 +252,14 @@ class IngressRequirerAppData(DatabagModel): default="http", description="What scheme to use in the generated ingress url" ) - @input_validator("scheme") + @validator("scheme", pre=True) def validate_scheme(cls, scheme): # noqa: N805 # pydantic wants 'cls' as first arg """Validate scheme arg.""" if scheme not in {"http", "https", "h2c"}: raise ValueError("invalid scheme: should be one of `http|https|h2c`") return scheme - @input_validator("port") + @validator("port", pre=True) def validate_port(cls, port): # noqa: N805 # pydantic wants 'cls' as first arg """Validate port.""" assert isinstance(port, int), type(port) @@ -283,13 +277,13 @@ class IngressRequirerUnitData(DatabagModel): "IP can only be None if the IP information can't be retrieved from juju.", ) - @input_validator("host") + @validator("host", pre=True) def validate_host(cls, host): # noqa: N805 # pydantic wants 'cls' as first arg """Validate host.""" assert isinstance(host, str), type(host) return host - @input_validator("ip") + @validator("ip", pre=True) def validate_ip(cls, ip): # noqa: N805 # pydantic wants 'cls' as first arg """Validate ip.""" if ip is None: @@ -468,10 +462,7 @@ def _handle_relation(self, event): event.relation, data.app.name, data.app.model, - [ - unit.dict() if PYDANTIC_IS_V1 else unit.model_dump(mode="json") - for unit in data.units - ], + [unit.dict() for unit in data.units], data.app.strip_prefix or False, data.app.redirect_https or False, ) diff --git a/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py b/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py index 3bc2dd8..b331bdc 100644 --- a/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/examples/flask/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -331,14 +331,10 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 40 +LIBPATCH = 36 PYDEPS = ["ops>=2.0.0"] -# Starting from what LIBPATCH number to apply legacy solutions -# v0.17 was the last version without secrets -LEGACY_SUPPORT_FROM = 17 - logger = logging.getLogger(__name__) Diff = namedtuple("Diff", "added changed deleted") @@ -355,16 +351,36 @@ def _on_topic_requested(self, event: TopicRequestedEvent): GROUP_MAPPING_FIELD = "secret_group_mapping" GROUP_SEPARATOR = "@" -MODEL_ERRORS = { - "not_leader": "this unit is not the leader", - "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", - "owner_no_refresh": "ERROR secret owner cannot use --refresh", -} +class SecretGroup(str): + """Secret groups specific type.""" + + +class SecretGroupsAggregate(str): + """Secret groups with option to extend with additional constants.""" + + def __init__(self): + self.USER = SecretGroup("user") + self.TLS = SecretGroup("tls") + self.EXTRA = SecretGroup("extra") + + def __setattr__(self, name, value): + """Setting internal constants.""" + if name in self.__dict__: + raise RuntimeError("Can't set constant!") + else: + super().__setattr__(name, SecretGroup(value)) + + def groups(self) -> list: + """Return the list of stored SecretGroups.""" + return list(self.__dict__.values()) + + def get_group(self, group: str) -> Optional[SecretGroup]: + """If the input str translates to a group name, return that.""" + return SecretGroup(group) if group in self.groups() else None 
-############################################################################## -# Exceptions -############################################################################## + +SECRET_GROUPS = SecretGroupsAggregate() class DataInterfacesError(Exception): @@ -391,19 +407,6 @@ class IllegalOperationError(DataInterfacesError): """To be used when an operation is not allowed to be performed.""" -class PrematureDataAccessError(DataInterfacesError): - """To be raised when the Relation Data may be accessed (written) before protocol init complete.""" - - -############################################################################## -# Global helpers / utilities -############################################################################## - -############################################################################## -# Databag handling and comparison methods -############################################################################## - - def get_encoded_dict( relation: Relation, member: Union[Unit, Application], field: str ) -> Optional[Dict[str, str]]: @@ -479,11 +482,6 @@ def diff(event: RelationChangedEvent, bucket: Optional[Union[Unit, Application]] return Diff(added, changed, deleted) -############################################################################## -# Module decorators -############################################################################## - - def leader_only(f): """Decorator to ensure that only leader can perform given operation.""" @@ -538,36 +536,6 @@ def wrapper(self, *args, **kwargs): return wrapper -def legacy_apply_from_version(version: int) -> Callable: - """Decorator to decide whether to apply a legacy function or not. - - Based on LEGACY_SUPPORT_FROM module variable value, the importer charm may only want - to apply legacy solutions starting from a specific LIBPATCH. - - NOTE: All 'legacy' functions have to be defined and called in a way that they return `None`. - This results in cleaner and more secure execution flows in case the function may be disabled. - This requirement implicitly means that legacy functions change the internal state strictly, - don't return information. 
- """ - - def decorator(f: Callable[..., None]): - """Signature is ensuring None return value.""" - f.legacy_version = version - - def wrapper(self, *args, **kwargs) -> None: - if version >= LEGACY_SUPPORT_FROM: - return f(self, *args, **kwargs) - - return wrapper - - return decorator - - -############################################################################## -# Helper classes -############################################################################## - - class Scope(Enum): """Peer relations scope.""" @@ -575,35 +543,9 @@ class Scope(Enum): UNIT = "unit" -class SecretGroup(str): - """Secret groups specific type.""" - - -class SecretGroupsAggregate(str): - """Secret groups with option to extend with additional constants.""" - - def __init__(self): - self.USER = SecretGroup("user") - self.TLS = SecretGroup("tls") - self.EXTRA = SecretGroup("extra") - - def __setattr__(self, name, value): - """Setting internal constants.""" - if name in self.__dict__: - raise RuntimeError("Can't set constant!") - else: - super().__setattr__(name, SecretGroup(value)) - - def groups(self) -> list: - """Return the list of stored SecretGroups.""" - return list(self.__dict__.values()) - - def get_group(self, group: str) -> Optional[SecretGroup]: - """If the input str translates to a group name, return that.""" - return SecretGroup(group) if group in self.groups() else None - - -SECRET_GROUPS = SecretGroupsAggregate() +################################################################################ +# Secrets internal caching +################################################################################ class CachedSecret: @@ -612,8 +554,6 @@ class CachedSecret: The data structure is precisely re-using/simulating as in the actual Secret Storage """ - KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] - def __init__( self, model: Model, @@ -631,95 +571,6 @@ def __init__( self.legacy_labels = legacy_labels self.current_label = None - @property - def meta(self) -> Optional[Secret]: - """Getting cached secret meta-information.""" - if not self._secret_meta: - if not (self._secret_uri or self.label): - return - - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - # Falling back to seeking for potential legacy labels - self._legacy_compat_find_secret_by_old_label() - - # If still not found, to be checked by URI, to be labelled with the proposed label - if not self._secret_meta and self._secret_uri: - self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) - return self._secret_meta - - ########################################################################## - # Backwards compatibility / Upgrades - ########################################################################## - # These functions are used to keep backwards compatibility on rolling upgrades - # Policy: - # All data is kept intact until the first write operation. (This allows a minimal - # grace period during which rollbacks are fully safe. For more info see the spec.) - # All data involves: - # - databag contents - # - secrets content - # - secret labels (!!!) 
- # Legacy functions must return None, and leave an equally consistent state whether - # they are executed or skipped (as a high enough versioned execution environment may - # not require so) - - # Compatibility - - @legacy_apply_from_version(34) - def _legacy_compat_find_secret_by_old_label(self) -> None: - """Compatibility function, allowing to find a secret by a legacy label. - - This functionality is typically needed when secret labels changed over an upgrade. - Until the first write operation, we need to maintain data as it was, including keeping - the old secret label. In order to keep track of the old label currently used to access - the secret, and additional 'current_label' field is being defined. - """ - for label in self.legacy_labels: - try: - self._secret_meta = self._model.get_secret(label=label) - except SecretNotFoundError: - pass - else: - if label != self.label: - self.current_label = label - return - - # Migrations - - @legacy_apply_from_version(34) - def _legacy_migration_to_new_label_if_needed(self) -> None: - """Helper function to re-create the secret with a different label. - - Juju does not provide a way to change secret labels. - Thus whenever moving from secrets version that involves secret label changes, - we "re-create" the existing secret, and attach the new label to the new - secret, to be used from then on. - - Note: we replace the old secret with a new one "in place", as we can't - easily switch the containing SecretCache structure to point to a new secret. - Instead we are changing the 'self' (CachedSecret) object to point to the - new instance. - """ - if not self.current_label or not (self.meta and self._secret_meta): - return - - # Create a new secret with the new label - content = self._secret_meta.get_content() - self._secret_uri = None - - # It will be nice to have the possibility to check if we are the owners of the secret... 
- try: - self._secret_meta = self.add_secret(content, label=self.label) - except ModelError as err: - if MODEL_ERRORS["not_leader"] not in str(err): - raise - self.current_label = None - - ########################################################################## - # Public functions - ########################################################################## - def add_secret( self, content: Dict[str, str], @@ -742,6 +593,28 @@ def add_secret( self._secret_meta = secret return self._secret_meta + @property + def meta(self) -> Optional[Secret]: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + for label in [self.label] + self.legacy_labels: + try: + self._secret_meta = self._model.get_secret(label=label) + except SecretNotFoundError: + pass + else: + if label != self.label: + self.current_label = label + break + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + def get_content(self) -> Dict[str, str]: """Getting cached secret content.""" if not self._secret_content: @@ -751,25 +624,42 @@ def get_content(self) -> Dict[str, str]: except (ValueError, ModelError) as err: # https://bugs.launchpad.net/juju/+bug/2042596 # Only triggered when 'refresh' is set + known_model_errors = [ + "ERROR either URI or label should be used for getting an owned secret but not both", + "ERROR secret owner cannot use --refresh", + ] if isinstance(err, ModelError) and not any( - msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + msg in str(err) for msg in known_model_errors ): raise # Due to: ValueError: Secret owner cannot use refresh=True self._secret_content = self.meta.get_content() return self._secret_content + def _move_to_new_label_if_needed(self): + """Helper function to re-create the secret with a different label.""" + if not self.current_label or not (self.meta and self._secret_meta): + return + + # Create a new secret with the new label + content = self._secret_meta.get_content() + self._secret_uri = None + + # I wish we could just check if we are the owners of the secret... + try: + self._secret_meta = self.add_secret(content, label=self.label) + except ModelError as err: + if "this unit is not the leader" not in str(err): + raise + self.current_label = None + def set_content(self, content: Dict[str, str]) -> None: """Setting cached secret content.""" if not self.meta: return - # DPE-4182: do not create new revision if the content stay the same - if content == self.get_content(): - return - if content: - self._legacy_migration_to_new_label_if_needed() + self._move_to_new_label_if_needed() self.meta.set_content(content) self._secret_content = content else: @@ -1032,23 +922,6 @@ def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" raise NotImplementedError - # Optional overrides - - def _legacy_apply_on_fetch(self) -> None: - """This function should provide a list of compatibility functions to be applied when fetching (legacy) data.""" - pass - - def _legacy_apply_on_update(self, fields: List[str]) -> None: - """This function should provide a list of compatibility functions to be applied when writing data. - - Since data may be at a legacy version, migration may be mandatory. 
- """ - pass - - def _legacy_apply_on_delete(self, fields: List[str]) -> None: - """This function should provide a list of compatibility functions to be applied when deleting (legacy) data.""" - pass - # Internal helper methods @staticmethod @@ -1301,16 +1174,6 @@ def get_relation(self, relation_name, relation_id) -> Relation: return relation - def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: - """Get the secret URI for the corresponding group.""" - secret_field = self._generate_secret_field_name(group) - return relation.data[self.component].get(secret_field) - - def set_secret_uri(self, relation: Relation, group: SecretGroup, secret_uri: str) -> None: - """Set the secret URI for the corresponding group.""" - secret_field = self._generate_secret_field_name(group) - relation.data[self.component][secret_field] = secret_uri - def fetch_relation_data( self, relation_ids: Optional[List[int]] = None, @@ -1327,8 +1190,6 @@ def fetch_relation_data( a dict of the values stored in the relation data bag for all relation instances (indexed by the relation ID). """ - self._legacy_apply_on_fetch() - if not relation_name: relation_name = self.relation_name @@ -1367,8 +1228,6 @@ def fetch_my_relation_data( NOTE: Since only the leader can read the relation's 'this_app'-side Application databag, the functionality is limited to leaders """ - self._legacy_apply_on_fetch() - if not relation_name: relation_name = self.relation_name @@ -1400,8 +1259,6 @@ def fetch_my_relation_field( @leader_only def update_relation_data(self, relation_id: int, data: dict) -> None: """Update the data within the relation.""" - self._legacy_apply_on_update(list(data.keys())) - relation_name = self.relation_name relation = self.get_relation(relation_name, relation_id) return self._update_relation_data(relation, data) @@ -1409,8 +1266,6 @@ def update_relation_data(self, relation_id: int, data: dict) -> None: @leader_only def delete_relation_data(self, relation_id: int, fields: List[str]) -> None: """Remove field from the relation.""" - self._legacy_apply_on_delete(fields) - relation_name = self.relation_name relation = self.get_relation(relation_name, relation_id) return self._delete_relation_data(relation, fields) @@ -1457,8 +1312,6 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: class ProviderData(Data): """Base provides-side of the data products relation.""" - RESOURCE_FIELD = "database" - def __init__( self, model: Model, @@ -1479,7 +1332,8 @@ def _add_relation_secret( uri_to_databag=True, ) -> bool: """Add a new Juju Secret that will be registered in the relation databag.""" - if uri_to_databag and self.get_secret_uri(relation, group_mapping): + secret_field = self._generate_secret_field_name(group_mapping) + if uri_to_databag and relation.data[self.component].get(secret_field): logging.error("Secret for relation %s already exists, not adding again", relation.id) return False @@ -1490,7 +1344,7 @@ def _add_relation_secret( # According to lint we may not have a Secret ID if uri_to_databag and secret.meta and secret.meta.id: - self.set_secret_uri(relation, group_mapping, secret.meta.id) + relation.data[self.component][secret_field] = secret.meta.id # Return the content that was added return True @@ -1591,7 +1445,8 @@ def _get_relation_secret( if not relation: return - if secret_uri := self.get_secret_uri(relation, group_mapping): + secret_field = self._generate_secret_field_name(group_mapping) + if secret_uri := relation.data[self.local_app].get(secret_field): return 
self.secrets.get(label, secret_uri) def _fetch_specific_relation_data( @@ -1624,15 +1479,6 @@ def _fetch_my_specific_relation_data( def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Set values for fields not caring whether it's a secret or not.""" req_secret_fields = [] - - keys = set(data.keys()) - if self.fetch_relation_field(relation.id, self.RESOURCE_FIELD) is None and ( - keys - {"endpoints", "read-only-endpoints", "replset"} - ): - raise PrematureDataAccessError( - "Premature access to relation data, update is forbidden before the connection is initialized." - ) - if relation.app: req_secret_fields = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) @@ -1753,10 +1599,11 @@ def _register_secrets_to_relation(self, relation: Relation, params_name_list: Li for group in SECRET_GROUPS.groups(): secret_field = self._generate_secret_field_name(group) - if secret_field in params_name_list and ( - secret_uri := self.get_secret_uri(relation, group) - ): - self._register_secret_to_relation(relation.name, relation.id, secret_uri, group) + if secret_field in params_name_list: + if secret_uri := relation.data[relation.app].get(secret_field): + self._register_secret_to_relation( + relation.name, relation.id, secret_uri, group + ) def _is_resource_created_for_relation(self, relation: Relation) -> bool: if not relation.app: @@ -1767,17 +1614,6 @@ def _is_resource_created_for_relation(self, relation: Relation) -> bool: ) return bool(data.get("username")) and bool(data.get("password")) - # Public functions - - def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: - """Getting relation secret URI for the corresponding Secret Group.""" - secret_field = self._generate_secret_field_name(group) - return relation.data[relation.app].get(secret_field) - - def set_secret_uri(self, relation: Relation, group: SecretGroup, uri: str) -> None: - """Setting relation secret URI is not possible for a Requirer.""" - raise NotImplementedError("Requirer can not change the relation secret URI.") - def is_resource_created(self, relation_id: Optional[int] = None) -> bool: """Check if the resource has been created. 
@@ -1928,6 +1764,7 @@ def __init__( secret_field_name: Optional[str] = None, deleted_label: Optional[str] = None, ): + """Manager of base client relations.""" RequirerData.__init__( self, model, @@ -1938,11 +1775,6 @@ def __init__( self.secret_field_name = secret_field_name if secret_field_name else self.SECRET_FIELD_NAME self.deleted_label = deleted_label self._secret_label_map = {} - - # Legacy information holders - self._legacy_labels = [] - self._legacy_secret_uri = None - # Secrets that are being dynamically added within the scope of this event handler run self._new_secrets = [] self._additional_secret_group_mapping = additional_secret_group_mapping @@ -2017,12 +1849,10 @@ def set_secret( value: The string value of the secret group_mapping: The name of the "secret group", in case the field is to be added to an existing secret """ - self._legacy_apply_on_update([field]) - full_field = self._field_to_internal_name(field, group_mapping) if self.secrets_enabled and full_field not in self.current_secret_fields: self._new_secrets.append(full_field) - if self.valid_field_pattern(field, full_field): + if self._no_group_with_databag(field, full_field): self.update_relation_data(relation_id, {full_field: value}) # Unlike for set_secret(), there's no harm using this operation with static secrets @@ -2035,8 +1865,6 @@ def get_secret( group_mapping: Optional[SecretGroup] = None, ) -> Optional[str]: """Public interface method to fetch secrets only.""" - self._legacy_apply_on_fetch() - full_field = self._field_to_internal_name(field, group_mapping) if ( self.secrets_enabled @@ -2044,7 +1872,7 @@ def get_secret( and field not in self.current_secret_fields ): return - if self.valid_field_pattern(field, full_field): + if self._no_group_with_databag(field, full_field): return self.fetch_my_relation_field(relation_id, full_field) @dynamic_secrets_only @@ -2055,19 +1883,14 @@ def delete_secret( group_mapping: Optional[SecretGroup] = None, ) -> Optional[str]: """Public interface method to delete secrets only.""" - self._legacy_apply_on_delete([field]) - full_field = self._field_to_internal_name(field, group_mapping) if self.secrets_enabled and full_field not in self.current_secret_fields: logger.warning(f"Secret {field} from group {group_mapping} was not found") return - - if self.valid_field_pattern(field, full_field): + if self._no_group_with_databag(field, full_field): self.delete_relation_data(relation_id, [full_field]) - ########################################################################## # Helpers - ########################################################################## @staticmethod def _field_to_internal_name(field: str, group: Optional[SecretGroup]) -> str: @@ -2109,69 +1932,10 @@ def _content_for_secret_group( if k in self.secret_fields } - def valid_field_pattern(self, field: str, full_field: str) -> bool: - """Check that no secret group is attempted to be used together without secrets being enabled. - - Secrets groups are impossible to use with versions that are not yet supporting secrets. - """ - if not self.secrets_enabled and full_field != field: - logger.error( - f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." 
- ) - return False - return True - - ########################################################################## - # Backwards compatibility / Upgrades - ########################################################################## - # These functions are used to keep backwards compatibility on upgrades - # Policy: - # All data is kept intact until the first write operation. (This allows a minimal - # grace period during which rollbacks are fully safe. For more info see spec.) - # All data involves: - # - databag - # - secrets content - # - secret labels (!!!) - # Legacy functions must return None, and leave an equally consistent state whether - # they are executed or skipped (as a high enough versioned execution environment may - # not require so) - - # Full legacy stack for each operation - - def _legacy_apply_on_fetch(self) -> None: - """All legacy functions to be applied on fetch.""" - relation = self._model.relations[self.relation_name][0] - self._legacy_compat_generate_prev_labels() - self._legacy_compat_secret_uri_from_databag(relation) - - def _legacy_apply_on_update(self, fields) -> None: - """All legacy functions to be applied on update.""" - relation = self._model.relations[self.relation_name][0] - self._legacy_compat_generate_prev_labels() - self._legacy_compat_secret_uri_from_databag(relation) - self._legacy_migration_remove_secret_from_databag(relation, fields) - self._legacy_migration_remove_secret_field_name_from_databag(relation) - - def _legacy_apply_on_delete(self, fields) -> None: - """All legacy functions to be applied on delete.""" - relation = self._model.relations[self.relation_name][0] - self._legacy_compat_generate_prev_labels() - self._legacy_compat_secret_uri_from_databag(relation) - self._legacy_compat_check_deleted_label(relation, fields) - - # Compatibility - - @legacy_apply_from_version(18) - def _legacy_compat_check_deleted_label(self, relation, fields) -> None: - """Helper function for legacy behavior. - - As long as https://bugs.launchpad.net/juju/+bug/2028094 wasn't fixed, - we did not delete fields but rather kept them in the secret with a string value - expressing invalidity. This function is maintainnig that behavior when needed. - """ - if not self.deleted_label: - return + # Backwards compatibility + def _check_deleted_label(self, relation, fields) -> None: + """Helper function for legacy behavior.""" current_data = self.fetch_my_relation_data([relation.id], fields) if current_data is not None: # Check if the secret we wanna delete actually exists @@ -2184,43 +1948,7 @@ def _legacy_compat_check_deleted_label(self, relation, fields) -> None: ", ".join(non_existent), ) - @legacy_apply_from_version(18) - def _legacy_compat_secret_uri_from_databag(self, relation) -> None: - """Fetching the secret URI from the databag, in case stored there.""" - self._legacy_secret_uri = relation.data[self.component].get( - self._generate_secret_field_name(), None - ) - - @legacy_apply_from_version(34) - def _legacy_compat_generate_prev_labels(self) -> None: - """Generator for legacy secret label names, for backwards compatibility. - - Secret label is part of the data that MUST be maintained across rolling upgrades. - In case there may be a change on a secret label, the old label must be recognized - after upgrades, and left intact until the first write operation -- when we roll over - to the new label. - - This function keeps "memory" of previously used secret labels. 
- NOTE: Return value takes decorator into account -- all 'legacy' functions may return `None` - - v0.34 (rev69): Fixing issue https://github.com/canonical/data-platform-libs/issues/155 - meant moving from '.' (i.e. 'mysql.app', 'mysql.unit') - to labels '..' (like 'peer.mysql.app') - """ - if self._legacy_labels: - return - - result = [] - members = [self._model.app.name] - if self.scope: - members.append(self.scope.value) - result.append(f"{'.'.join(members)}") - self._legacy_labels = result - - # Migration - - @legacy_apply_from_version(18) - def _legacy_migration_remove_secret_from_databag(self, relation, fields: List[str]) -> None: + def _remove_secret_from_databag(self, relation, fields: List[str]) -> None: """For Rolling Upgrades -- when moving from databag to secrets usage. Practically what happens here is to remove stuff from the databag that is @@ -2234,16 +1962,10 @@ def _legacy_migration_remove_secret_from_databag(self, relation, fields: List[st if self._fetch_relation_data_without_secrets(self.component, relation, [field]): self._delete_relation_data_without_secrets(self.component, relation, [field]) - @legacy_apply_from_version(18) - def _legacy_migration_remove_secret_field_name_from_databag(self, relation) -> None: + def _remove_secret_field_name_from_databag(self, relation) -> None: """Making sure that the old databag URI is gone. This action should not be executed more than once. - - There was a phase (before moving secrets usage to libs) when charms saved the peer - secret URI to the databag, and used this URI from then on to retrieve their secret. - When upgrading to charm versions using this library, we need to add a label to the - secret and access it via label from than on, and remove the old traces from the databag. """ # Nothing to do if 'internal-secret' is not in the databag if not (relation.data[self.component].get(self._generate_secret_field_name())): @@ -2259,9 +1981,25 @@ def _legacy_migration_remove_secret_field_name_from_databag(self, relation) -> N # Databag reference to the secret URI can be removed, now that it's labelled relation.data[self.component].pop(self._generate_secret_field_name(), None) - ########################################################################## + def _previous_labels(self) -> List[str]: + """Generator for legacy secret label names, for backwards compatibility.""" + result = [] + members = [self._model.app.name] + if self.scope: + members.append(self.scope.value) + result.append(f"{'.'.join(members)}") + return result + + def _no_group_with_databag(self, field: str, full_field: str) -> bool: + """Check that no secret group is attempted to be used together with databag.""" + if not self.secrets_enabled and full_field != field: + logger.error( + f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." 
+ ) + return False + return True + # Event handlers - ########################################################################## def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the relation has changed.""" @@ -2271,9 +2009,7 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: """Event emitted when the secret has changed.""" pass - ########################################################################## # Overrides of Relation Data handling functions - ########################################################################## def _generate_secret_label( self, relation_name: str, relation_id: int, group_mapping: SecretGroup @@ -2310,14 +2046,13 @@ def _get_relation_secret( return label = self._generate_secret_label(relation_name, relation_id, group_mapping) + secret_uri = relation.data[self.component].get(self._generate_secret_field_name(), None) # URI or legacy label is only to applied when moving single legacy secret to a (new) label if group_mapping == SECRET_GROUPS.EXTRA: # Fetching the secret with fallback to URI (in case label is not yet known) # Label would we "stuck" on the secret in case it is found - return self.secrets.get( - label, self._legacy_secret_uri, legacy_labels=self._legacy_labels - ) + return self.secrets.get(label, secret_uri, legacy_labels=self._previous_labels()) return self.secrets.get(label) def _get_group_secret_contents( @@ -2347,6 +2082,7 @@ def _fetch_my_specific_relation_data( @either_static_or_dynamic_secrets def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + self._remove_secret_from_databag(relation, list(data.keys())) _, normal_fields = self._process_secret_fields( relation, self.secret_fields, @@ -2355,6 +2091,7 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non data=data, uri_to_databag=False, ) + self._remove_secret_field_name_from_databag(relation) normal_content = {k: v for k, v in data.items() if k in normal_fields} self._update_relation_data_without_secrets(self.component, relation, normal_content) @@ -2363,6 +2100,8 @@ def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> Non def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" if self.secret_fields and self.deleted_label: + # Legacy, backwards compatibility + self._check_deleted_label(relation, fields) _, normal_fields = self._process_secret_fields( relation, @@ -2398,9 +2137,7 @@ def fetch_relation_field( "fetch_my_relation_data() and fetch_my_relation_field()" ) - ########################################################################## # Public functions -- inherited - ########################################################################## fetch_my_relation_data = Data.fetch_my_relation_data fetch_my_relation_field = Data.fetch_my_relation_field @@ -2865,14 +2602,6 @@ def set_version(self, relation_id: int, version: str) -> None: """ self.update_relation_data(relation_id, {"version": version}) - def set_subordinated(self, relation_id: int) -> None: - """Raises the subordinated flag in the application relation databag. - - Args: - relation_id: the identifier for a particular relation. 
- """ - self.update_relation_data(relation_id, {"subordinated": "true"}) - class DatabaseProviderEventHandlers(EventHandlers): """Provider-side of the database relation handlers.""" @@ -3109,21 +2838,6 @@ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: """Event emitted when the database relation has changed.""" - is_subordinate = False - remote_unit_data = None - for key in event.relation.data.keys(): - if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): - remote_unit_data = event.relation.data[key] - elif isinstance(key, Application) and key.name != self.charm.app.name: - is_subordinate = event.relation.data[key].get("subordinated") == "true" - - if is_subordinate: - if not remote_unit_data: - return - - if remote_unit_data.get("state") != "ready": - return - # Check which data has changed to emit customs events. diff = self._diff(event) @@ -3305,8 +3019,6 @@ class KafkaRequiresEvents(CharmEvents): class KafkaProviderData(ProviderData): """Provider-side of the Kafka relation.""" - RESOURCE_FIELD = "topic" - def __init__(self, model: Model, relation_name: str) -> None: super().__init__(model, relation_name) @@ -3556,8 +3268,6 @@ class OpenSearchRequiresEvents(CharmEvents): class OpenSearchProvidesData(ProviderData): """Provider-side of the OpenSearch relation.""" - RESOURCE_FIELD = "index" - def __init__(self, model: Model, relation_name: str) -> None: super().__init__(model, relation_name) diff --git a/requirements.txt b/requirements.txt index 1f27559..3d372d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ cosl jsonschema >=4.23,<4.24 ops >= 2.6 -pydantic==2.10.5 +pydantic==2.10.6 Jinja2 From 5df9a4d526d46b68d0176c25d85276337dde5f9b Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Tue, 28 Jan 2025 10:16:38 +0300 Subject: [PATCH 63/70] Un-Merge branch 'smtp-integration' into tempo-tracing From 5f37a6a5cccc6d41501c540592b8823e656fe719 Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Tue, 28 Jan 2025 10:19:27 +0300 Subject: [PATCH 64/70] Chore(): Add example lib folders to lint ignore --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index 8b4ae53..af71542 100644 --- a/tox.ini +++ b/tox.ini @@ -69,6 +69,8 @@ commands = --skip {toxinidir}/examples/fastapi/charm/lib \ --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/examples/flask/lib \ + --skip {toxinidir}/examples/fastapi/charm/lib \ + --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/tests/integration/.mypy_cache # pflake8 wrapper supports config from pyproject.toml pflake8 {[vars]src_path} {[vars]legacy_src_path} From 3de0fd4dcbe3ebb8447356017de387981f8c108e Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Tue, 28 Jan 2025 10:22:15 +0300 Subject: [PATCH 65/70] Revert "Merge branch 'smtp-integration' into tempo-tracing" This reverts commit a2b439befd1c049234ad75b0c548edd3048cd0ce, reversing changes made to 8eeb7a3011fc573b0fb660d8a830236a236abdeb. 
--- tox.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/tox.ini b/tox.ini index af71542..8b4ae53 100644 --- a/tox.ini +++ b/tox.ini @@ -69,8 +69,6 @@ commands = --skip {toxinidir}/examples/fastapi/charm/lib \ --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/examples/flask/lib \ - --skip {toxinidir}/examples/fastapi/charm/lib \ - --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/tests/integration/.mypy_cache # pflake8 wrapper supports config from pyproject.toml pflake8 {[vars]src_path} {[vars]legacy_src_path} From f49e157120c3939f7cff92008f2d8582b7973c60 Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Tue, 28 Jan 2025 10:23:30 +0300 Subject: [PATCH 66/70] Revert "Merge branch 'smtp-integration' into tempo-tracing" This reverts commit a2b439befd1c049234ad75b0c548edd3048cd0ce, reversing changes made to 8eeb7a3011fc573b0fb660d8a830236a236abdeb. --- examples/flask/charmcraft.yaml | 4 - .../lib/charms/smtp_integrator/v0/smtp.py | 373 ------------------ src/paas_charm/app.py | 18 - src/paas_charm/charm.py | 30 -- src/paas_charm/charm_state.py | 166 -------- tests/conftest.py | 1 - tests/integration/flask/conftest.py | 76 ---- tests/integration/flask/test_charm.py | 1 - tests/integration/flask/test_integrations.py | 43 -- tox.ini | 2 - 10 files changed, 714 deletions(-) delete mode 100644 examples/flask/lib/charms/smtp_integrator/v0/smtp.py diff --git a/examples/flask/charmcraft.yaml b/examples/flask/charmcraft.yaml index 945f418..7fc326f 100644 --- a/examples/flask/charmcraft.yaml +++ b/examples/flask/charmcraft.yaml @@ -137,10 +137,6 @@ requires: interface: tracing optional: True limit: 1 - smtp: - interface: smtp - optional: True - limit: 1 resources: flask-app-image: diff --git a/examples/flask/lib/charms/smtp_integrator/v0/smtp.py b/examples/flask/lib/charms/smtp_integrator/v0/smtp.py deleted file mode 100644 index d769b1a..0000000 --- a/examples/flask/lib/charms/smtp_integrator/v0/smtp.py +++ /dev/null @@ -1,373 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# Licensed under the Apache2.0. See LICENSE file in charm source for details. - -"""Library to manage the integration with the SMTP Integrator charm. - -This library contains the Requires and Provides classes for handling the integration -between an application and a charm providing the `smtp` and `smtp-legacy` integrations. -If the requirer charm supports secrets, the preferred approach is to use the `smtp` -relation to leverage them. -This library also contains a `SmtpRelationData` class to wrap the SMTP data that will -be shared via the integration. - -### Requirer Charm - -```python - -from charms.smtp_integrator.v0.smtp import SmtpDataAvailableEvent, SmtpRequires - -class SmtpRequirerCharm(ops.CharmBase): - def __init__(self, *args): - super().__init__(*args) - self.smtp = smtp.SmtpRequires(self) - self.framework.observe(self.smtp.on.smtp_data_available, self._handler) - ... - - def _handler(self, events: SmtpDataAvailableEvent) -> None: - ... - -``` - -As shown above, the library provides a custom event to handle the scenario in -which new SMTP data has been added or updated. - -### Provider Charm - -Following the previous example, this is an example of the provider charm. - -```python -from charms.smtp_integrator.v0.smtp import SmtpProvides - -class SmtpProviderCharm(ops.CharmBase): - def __init__(self, *args): - super().__init__(*args) - self.smtp = SmtpProvides(self) - ... 
- -``` -The SmtpProvides object wraps the list of relations into a `relations` property -and provides an `update_relation_data` method to update the relation data by passing -a `SmtpRelationData` data object. - -```python -class SmtpProviderCharm(ops.CharmBase): - ... - - def _on_config_changed(self, _) -> None: - for relation in self.model.relations[self.smtp.relation_name]: - self.smtp.update_relation_data(relation, self._get_smtp_data()) - -``` -""" - -# The unique Charmhub library identifier, never change it -LIBID = "09583c2f9c1d4c0f9a40244cfc20b0c2" - -# Increment this major API version when introducing breaking changes -LIBAPI = 0 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 14 - -PYDEPS = ["pydantic>=2"] - -# pylint: disable=wrong-import-position -import itertools -import logging -import typing -from ast import literal_eval -from enum import Enum -from typing import Dict, Optional - -import ops -from pydantic import BaseModel, Field, ValidationError - -logger = logging.getLogger(__name__) - -DEFAULT_RELATION_NAME = "smtp" -LEGACY_RELATION_NAME = "smtp-legacy" - - -class TransportSecurity(str, Enum): - """Represent the transport security values. - - Attributes: - NONE: none - STARTTLS: starttls - TLS: tls - """ - - NONE = "none" - STARTTLS = "starttls" - TLS = "tls" - - -class AuthType(str, Enum): - """Represent the auth type values. - - Attributes: - NONE: none - NOT_PROVIDED: not_provided - PLAIN: plain - """ - - NONE = "none" - NOT_PROVIDED = "not_provided" - PLAIN = "plain" - - -class SmtpRelationData(BaseModel): - """Represent the relation data. - - Attributes: - host: The hostname or IP address of the outgoing SMTP relay. - port: The port of the outgoing SMTP relay. - user: The SMTP AUTH user to use for the outgoing SMTP relay. - password: The SMTP AUTH password to use for the outgoing SMTP relay. - password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored. - auth_type: The type used to authenticate with the SMTP relay. - transport_security: The security protocol to use for the outgoing SMTP relay. - domain: The domain used by the emails sent from SMTP relay. - skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. - """ - - host: str = Field(..., min_length=1) - port: int = Field(..., ge=1, le=65536) - user: Optional[str] = None - password: Optional[str] = None - password_id: Optional[str] = None - auth_type: AuthType - transport_security: TransportSecurity - domain: Optional[str] = None - skip_ssl_verify: Optional[bool] = False - - def to_relation_data(self) -> Dict[str, str]: - """Convert an instance of SmtpRelationData to the relation representation. - - Returns: - Dict containing the representation. - """ - result = { - "host": str(self.host), - "port": str(self.port), - "auth_type": self.auth_type.value, - "transport_security": self.transport_security.value, - "skip_ssl_verify": str(self.skip_ssl_verify), - } - if self.domain: - result["domain"] = self.domain - if self.user: - result["user"] = self.user - if self.password: - result["password"] = self.password - if self.password_id: - result["password_id"] = self.password_id - return result - - -class SmtpDataAvailableEvent(ops.RelationEvent): - """Smtp event emitted when relation data has changed. - - Attributes: - host: The hostname or IP address of the outgoing SMTP relay. - port: The port of the outgoing SMTP relay. 
- user: The SMTP AUTH user to use for the outgoing SMTP relay. - password: The SMTP AUTH password to use for the outgoing SMTP relay. - password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored. - auth_type: The type used to authenticate with the SMTP relay. - transport_security: The security protocol to use for the outgoing SMTP relay. - domain: The domain used by the emails sent from SMTP relay. - skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. - """ - - @property - def host(self) -> str: - """Fetch the SMTP host from the relation.""" - assert self.relation.app - return typing.cast(str, self.relation.data[self.relation.app].get("host")) - - @property - def port(self) -> int: - """Fetch the SMTP port from the relation.""" - assert self.relation.app - return int(typing.cast(str, self.relation.data[self.relation.app].get("port"))) - - @property - def user(self) -> str: - """Fetch the SMTP user from the relation.""" - assert self.relation.app - return typing.cast(str, self.relation.data[self.relation.app].get("user")) - - @property - def password(self) -> str: - """Fetch the SMTP password from the relation.""" - assert self.relation.app - return typing.cast(str, self.relation.data[self.relation.app].get("password")) - - @property - def password_id(self) -> str: - """Fetch the SMTP password from the relation.""" - assert self.relation.app - return typing.cast(str, self.relation.data[self.relation.app].get("password_id")) - - @property - def auth_type(self) -> AuthType: - """Fetch the SMTP auth type from the relation.""" - assert self.relation.app - return AuthType(self.relation.data[self.relation.app].get("auth_type")) - - @property - def transport_security(self) -> TransportSecurity: - """Fetch the SMTP transport security protocol from the relation.""" - assert self.relation.app - return TransportSecurity(self.relation.data[self.relation.app].get("transport_security")) - - @property - def domain(self) -> str: - """Fetch the SMTP domain from the relation.""" - assert self.relation.app - return typing.cast(str, self.relation.data[self.relation.app].get("domain")) - - @property - def skip_ssl_verify(self) -> bool: - """Fetch the skip_ssl_verify flag from the relation.""" - assert self.relation.app - return literal_eval( - typing.cast(str, self.relation.data[self.relation.app].get("skip_ssl_verify")) - ) - - -class SmtpRequiresEvents(ops.CharmEvents): - """SMTP events. - - This class defines the events that a SMTP requirer can emit. - - Attributes: - smtp_data_available: the SmtpDataAvailableEvent. - """ - - smtp_data_available = ops.EventSource(SmtpDataAvailableEvent) - - -class SmtpRequires(ops.Object): - """Requirer side of the SMTP relation. - - Attributes: - on: events the provider can emit. - """ - - on = SmtpRequiresEvents() - - def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None: - """Construct. - - Args: - charm: the provider charm. - relation_name: the relation name. - """ - super().__init__(charm, relation_name) - self.charm = charm - self.relation_name = relation_name - self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed) - - def get_relation_data(self) -> Optional[SmtpRelationData]: - """Retrieve the relation data. - - Returns: - SmtpRelationData: the relation data. 
- """ - relation = self.model.get_relation(self.relation_name) - return self._get_relation_data_from_relation(relation) if relation else None - - def _get_relation_data_from_relation( - self, relation: ops.Relation - ) -> Optional[SmtpRelationData]: - """Retrieve the relation data. - - Args: - relation: the relation to retrieve the data from. - - Returns: - SmtpRelationData: the relation data. - """ - assert relation.app - relation_data = relation.data[relation.app] - if not relation_data: - return None - return SmtpRelationData( - host=typing.cast(str, relation_data.get("host")), - port=typing.cast(int, relation_data.get("port")), - user=relation_data.get("user"), - password=relation_data.get("password"), - password_id=relation_data.get("password_id"), - auth_type=AuthType(relation_data.get("auth_type")), - transport_security=TransportSecurity(relation_data.get("transport_security")), - domain=relation_data.get("domain"), - skip_ssl_verify=typing.cast(bool, relation_data.get("skip_ssl_verify")), - ) - - def _is_relation_data_valid(self, relation: ops.Relation) -> bool: - """Validate the relation data. - - Args: - relation: the relation to validate. - - Returns: - true: if the relation data is valid. - """ - try: - _ = self._get_relation_data_from_relation(relation) - return True - except ValidationError as ex: - error_fields = set( - itertools.chain.from_iterable(error["loc"] for error in ex.errors()) - ) - error_field_str = " ".join(f"{f}" for f in error_fields) - logger.warning("Error validation the relation data %s", error_field_str) - return False - - def _on_relation_changed(self, event: ops.RelationChangedEvent) -> None: - """Event emitted when the relation has changed. - - Args: - event: event triggering this handler. - """ - assert event.relation.app - relation_data = event.relation.data[event.relation.app] - if relation_data: - if relation_data["auth_type"] == AuthType.NONE.value: - logger.warning('Insecure setting: auth_type has a value "none"') - if relation_data["transport_security"] == TransportSecurity.NONE.value: - logger.warning('Insecure setting: transport_security has value "none"') - if self._is_relation_data_valid(event.relation): - self.on.smtp_data_available.emit(event.relation, app=event.app, unit=event.unit) - - -class SmtpProvides(ops.Object): - """Provider side of the SMTP relation.""" - - def __init__(self, charm: ops.CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None: - """Construct. - - Args: - charm: the provider charm. - relation_name: the relation name. - """ - super().__init__(charm, relation_name) - self.charm = charm - self.relation_name = relation_name - - def update_relation_data(self, relation: ops.Relation, smtp_data: SmtpRelationData) -> None: - """Update the relation data. - - Args: - relation: the relation for which to update the data. - smtp_data: a SmtpRelationData instance wrapping the data to be updated. 
- """ - relation_data = smtp_data.to_relation_data() - if relation_data["auth_type"] == AuthType.NONE.value: - logger.warning('Insecure setting: auth_type has a value "none"') - if relation_data["transport_security"] == TransportSecurity.NONE.value: - logger.warning('Insecure setting: transport_security has value "none"') - relation.data[self.charm.model.app].update(relation_data) diff --git a/src/paas_charm/app.py b/src/paas_charm/app.py index a215fda..4d7a803 100644 --- a/src/paas_charm/app.py +++ b/src/paas_charm/app.py @@ -233,7 +233,6 @@ def _app_layer(self) -> ops.pebble.LayerDict: "command" ] = self._alternate_service_command - for service_name, service in services.items(): normalised_service_name = service_name.lower() # Add environment variables to all worker processes. @@ -323,23 +322,6 @@ def map_integrations_to_env(integrations: IntegrationsState, prefix: str = "") - rabbitmq_envvars = _rabbitmq_uri_to_env_variables("RABBITMQ", integrations.rabbitmq_uri) env.update(rabbitmq_envvars) - if integrations.smtp_parameters: - smtp = integrations.smtp_parameters - env.update( - (k, v) - for k, v in ( - ("SMTP_HOST", smtp.host), - ("SMTP_PORT", str(smtp.port)), - ("SMTP_USER", smtp.user), - ("SMTP_PASSWORD", smtp.password), - ("SMTP_PASSWORD_ID", smtp.password_id), - ("SMTP_AUTH_TYPE", smtp.auth_type), - ("SMTP_TRANSPORT_SECURITY", smtp.transport_security), - ("SMTP_DOMAIN", smtp.domain), - ("SMTP_SKIP_SSL_VERIFY", smtp.skip_ssl_verify), - ) - if v is not None - ) return {prefix + k: v for k, v in env.items()} diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index 8f61fe4..a1f323a 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -52,13 +52,6 @@ logger.exception( "Missing charm library, please run " "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" - -try: - # pylint: disable=ungrouped-imports - from charms.smtp_integrator.v0.smtp import SmtpRequires -except ImportError: - logger.exception( - "Missing charm library, please run `charmcraft fetch-lib charms.smtp_integrator.v0.smtp`" ) @@ -106,12 +99,6 @@ def __init__(self, framework: ops.Framework, framework_name: str) -> None: self._rabbitmq = self._init_rabbitmq(requires) self._tracing = self._init_tracing(requires) - if "smtp" in requires and requires["smtp"].interface_name == "smtp": - self._smtp = SmtpRequires(self) - self.framework.observe(self._smtp.on.smtp_data_available, self._on_smtp_data_available) - else: - self._smtp = None - self._database_migration = DatabaseMigration( container=self.unit.get_container(self._workload_config.container_name), state_dir=self._workload_config.state_dir, @@ -431,9 +418,6 @@ def _missing_required_other_integrations( if self._tracing and not charm_state.integrations.tempo_parameters: if not requires["tracing"].optional: yield "tracing" - if self._smtp and not charm_state.integrations.smtp_parameters: - if not requires["smtp"].optional: - yield "smtp" def _missing_required_integrations( self, charm_state: CharmState @@ -490,14 +474,6 @@ def _create_charm_state(self) -> CharmState: saml_relation_data = None if self._saml and (saml_data := self._saml.get_relation_data()): saml_relation_data = saml_data.to_relation_data() - smtp_relation_data = None - if self._smtp and (smtp_data := self._smtp.get_relation_data()): - smtp_relation_data = smtp_data.to_relation_data() - logger.info(f"===========smtp_relation_data: {smtp_relation_data}") - if smtp_relation_data.get("password_id", None): - secret = self.model.get_secret(id=smtp_relation_data.password_id) - 
content = secret.get_content() - logger.info(f"Setting SMTP secret content: {content}") charm_config = {k: config_get_with_secret(self, k) for k in self.config.keys()} config = typing.cast( dict, @@ -518,7 +494,6 @@ def _create_charm_state(self) -> CharmState: rabbitmq_uri=self._rabbitmq.rabbitmq_uri() if self._rabbitmq else None, tempo_parameters=TempoParameters.from_charm(name=self.app.name, tracing=self._tracing), base_url=self._base_url, - smtp_relation_data=smtp_relation_data, ) @property @@ -642,8 +617,3 @@ def _on_tracing_relation_changed(self, _: ops.HookEvent) -> None: def _on_tracing_relation_broken(self, _: ops.HookEvent) -> None: """Handle tracing relation broken event.""" self.restart() - - @block_if_invalid_config - def _on_smtp_data_available(self, _: ops.HookEvent) -> None: - """Handle smtp data available event.""" - self.restart() diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 9b5707c..637bd14 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -7,7 +7,6 @@ import re import typing from dataclasses import dataclass, field -from enum import Enum from typing import Optional from charms.data_platform_libs.v0.data_interfaces import DatabaseRequires @@ -210,7 +209,6 @@ def from_charm( # pylint: disable=too-many-arguments saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, tempo_parameters: TempoParameters | None = None, - smtp_relation_data: dict | None = None, base_url: str | None = None, ) -> "CharmState": """Initialize a new instance of the CharmState class from the associated charm. @@ -227,7 +225,6 @@ def from_charm( # pylint: disable=too-many-arguments rabbitmq_uri: RabbitMQ uri. tempo_parameters: The tracing uri provided by the Tempo coordinator charm and charm name. - smtp_relation_data: Relation data from the SMTP app. base_url: Base URL for the service. Return: @@ -249,7 +246,6 @@ def from_charm( # pylint: disable=too-many-arguments saml_relation_data=saml_relation_data, rabbitmq_uri=rabbitmq_uri, tempo_parameters=tempo_parameters, - smtp_relation_data=smtp_relation_data, ) return cls( framework=framework, @@ -338,7 +334,6 @@ class IntegrationsState: rabbitmq_uri: RabbitMQ uri. tempo_parameters: The tracing uri provided by the Tempo coordinator charm and charm name. - smtp_parameters: SMTP parameters. """ redis_uri: str | None = None @@ -347,10 +342,8 @@ class IntegrationsState: saml_parameters: SamlParameters | None = None rabbitmq_uri: str | None = None tempo_parameters: TempoParameters | None = None - smtp_parameters: "SmtpParameters | None" = None # This dataclass combines all the integrations, so it is reasonable that they stay together. - # flake8: noqa: C901 @classmethod def build( # pylint: disable=too-many-arguments cls, @@ -361,7 +354,6 @@ def build( # pylint: disable=too-many-arguments saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, tempo_parameters: TempoParameters | None = None, - smtp_relation_data: dict | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -375,7 +367,6 @@ def build( # pylint: disable=too-many-arguments rabbitmq_uri: RabbitMQ uri. tempo_parameters: The tracing uri provided by the Tempo coordinator charm and charm name. - smtp_relation_data: smtp relation data from smtp lib. Return: The IntegrationsState instance created. 
@@ -413,18 +404,6 @@ def build( # pylint: disable=too-many-arguments # as None while the integration is being created. if redis_uri is not None and re.fullmatch(r"redis://[^:/]+:None", redis_uri): redis_uri = None - - if smtp_relation_data is not None: - try: - smtp_parameters = SmtpParameters(**smtp_relation_data) - except ValidationError as exc: - error_message = build_validation_error_message(exc) - raise CharmConfigInvalidError( - f"Invalid Smtp configuration: {error_message}" - ) from exc - else: - smtp_parameters = None - return cls( redis_uri=redis_uri, databases_uris={ @@ -436,149 +415,4 @@ def build( # pylint: disable=too-many-arguments saml_parameters=saml_parameters, rabbitmq_uri=rabbitmq_uri, tempo_parameters=tempo_parameters, - smtp_parameters=smtp_parameters, ) - - -class S3Parameters(BaseModel): - """Configuration for accessing S3 bucket. - - Attributes: - access_key: AWS access key. - secret_key: AWS secret key. - region: The region to connect to the object storage. - storage_class: Storage Class for objects uploaded to the object storage. - bucket: The bucket name. - endpoint: The endpoint used to connect to the object storage. - path: The path inside the bucket to store objects. - s3_api_version: S3 protocol specific API signature. - s3_uri_style: The S3 protocol specific bucket path lookup type. Can be "path" or "host". - addressing_style: S3 protocol addressing style, can be "path" or "virtual". - attributes: The custom metadata (HTTP headers). - tls_ca_chain: The complete CA chain, which can be used for HTTPS validation. - """ - - access_key: str = Field(alias="access-key") - secret_key: str = Field(alias="secret-key") - region: Optional[str] = None - storage_class: Optional[str] = Field(alias="storage-class", default=None) - bucket: str - endpoint: Optional[str] = None - path: Optional[str] = None - s3_api_version: Optional[str] = Field(alias="s3-api-version", default=None) - s3_uri_style: Optional[str] = Field(alias="s3-uri-style", default=None) - tls_ca_chain: Optional[list[str]] = Field(alias="tls-ca-chain", default=None) - attributes: Optional[list[str]] = None - - @property - def addressing_style(self) -> Optional[str]: - """Translates s3_uri_style to AWS addressing_style.""" - if self.s3_uri_style == "host": - return "virtual" - # If None or "path", it does not change. - return self.s3_uri_style - - -class SamlParameters(BaseModel, extra=Extra.allow): - """Configuration for accessing SAML. - - Attributes: - entity_id: Entity Id of the SP. - metadata_url: URL for the metadata for the SP. - signing_certificate: Signing certificate for the SP. - single_sign_on_redirect_url: Sign on redirect URL for the SP. - """ - - entity_id: str - metadata_url: str - signing_certificate: str = Field(alias="x509certs") - single_sign_on_redirect_url: str = Field(alias="single_sign_on_service_redirect_url") - - @field_validator("signing_certificate") - @classmethod - def validate_signing_certificate_exists(cls, certs: str, _: ValidationInfo) -> str: - """Validate that at least a certificate exists in the list of certificates. - - It is a prerequisite that the fist certificate is the signing certificate, - otherwise this method would return a wrong certificate. - - Args: - certs: Original x509certs field - - Returns: - The validated signing certificate - - Raises: - ValueError: If there is no certificate. - """ - certificate = certs.split(",")[0] - if not certificate: - raise ValueError("Missing x509certs. 
There should be at least one certificate.") - return certificate - - -class TransportSecurity(str, Enum): - """Represent the transport security values. - - Attributes: - NONE: none - STARTTLS: starttls - TLS: tls - """ - - NONE = "none" - STARTTLS = "starttls" - TLS = "tls" - - -class AuthType(str, Enum): - """Represent the auth type values. - - Attributes: - NONE: none - NOT_PROVIDED: not_provided - PLAIN: plain - """ - - NONE = "none" - NOT_PROVIDED = "not_provided" - PLAIN = "plain" - - -class SmtpParameters(BaseModel, extra=Extra.allow): - """Represent the SMTP relation data. - - Attributes: - host: The hostname or IP address of the outgoing SMTP relay. - port: The port of the outgoing SMTP relay. - user: The SMTP AUTH user to use for the outgoing SMTP relay. - password: The SMTP AUTH password to use for the outgoing SMTP relay. - password_id: The secret ID where the SMTP AUTH password for the SMTP relay is stored. - auth_type: The type used to authenticate with the SMTP relay. - transport_security: The security protocol to use for the outgoing SMTP relay. - domain: The domain used by the emails sent from SMTP relay. - skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. - """ - - host: str = Field(..., min_length=1) - port: int = Field(..., ge=1, le=65536) - user: str | None = None - password: str | None = None - password_id: str | None = None - auth_type: AuthType | None = None - transport_security: TransportSecurity | None = None - domain: str | None = None - skip_ssl_verify: str | None = False - - - @field_validator("auth_type") - @classmethod - def validate_auth_type(cls, auth_type: str, _: ValidationInfo) -> str: - if auth_type == AuthType.NONE: - return None - - @field_validator("transport_security") - @classmethod - def validate_transport_security(cls, transport_security: str, _: ValidationInfo) -> str: - if transport_security == TransportSecurity.NONE: - return None diff --git a/tests/conftest.py b/tests/conftest.py index d6fd748..615192d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,4 +15,3 @@ def pytest_addoption(parser): parser.addoption("--fastapi-app-image", action="store") parser.addoption("--go-app-image", action="store") parser.addoption("--localstack-address", action="store") - parser.addoption("--kube-config", action="store") diff --git a/tests/integration/flask/conftest.py b/tests/integration/flask/conftest.py index ccccf57..8cc7959 100644 --- a/tests/integration/flask/conftest.py +++ b/tests/integration/flask/conftest.py @@ -3,15 +3,11 @@ """Fixtures for flask charm integration tests.""" -import collections -import logging import os import pathlib -import time from secrets import token_hex import boto3 -import kubernetes import pytest import pytest_asyncio from botocore.config import Config as BotoConfig @@ -25,9 +21,6 @@ PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent.parent -logger = logging.getLogger(__name__) - - @pytest.fixture(autouse=True) def cwd(): return os.chdir(PROJECT_ROOT / "examples/flask") @@ -345,72 +338,3 @@ async def rabbitmq_k8s_integration_fixture( await flask_app.destroy_relation("rabbitmq", f"{rabbitmq_k8s_app.name}:amqp") await model.wait_for_idle(apps=[flask_app.name, rabbitmq_k8s_app.name], status="active") - - -@pytest.fixture(scope="module", name="load_kube_config") -def load_kube_config_fixture(pytestconfig: pytest.Config): - """Load kubernetes config file.""" - kube_config = pytestconfig.getoption("--kube-config") - 
kubernetes.config.load_kube_config(config_file=kube_config) - - -@pytest.fixture(scope="module") -def mailcatcher(load_kube_config, ops_test: OpsTest): - """Deploy test mailcatcher service.""" - assert ops_test.model - namespace = ops_test.model.name - v1 = kubernetes.client.CoreV1Api() - pod = kubernetes.client.V1Pod( - api_version="v1", - kind="Pod", - metadata=kubernetes.client.V1ObjectMeta( - name="mailcatcher", - namespace=namespace, - labels={"app.kubernetes.io/name": "mailcatcher"}, - ), - spec=kubernetes.client.V1PodSpec( - containers=[ - kubernetes.client.V1Container( - name="mailcatcher", - image="sj26/mailcatcher", - ports=[ - kubernetes.client.V1ContainerPort(container_port=1025), - kubernetes.client.V1ContainerPort(container_port=1080), - ], - ) - ], - ), - ) - v1.create_namespaced_pod(namespace=namespace, body=pod) - service = kubernetes.client.V1Service( - api_version="v1", - kind="Service", - metadata=kubernetes.client.V1ObjectMeta(name="mailcatcher-service", namespace=namespace), - spec=kubernetes.client.V1ServiceSpec( - type="ClusterIP", - ports=[ - kubernetes.client.V1ServicePort(port=1025, target_port=1025, name="tcp-1025"), - kubernetes.client.V1ServicePort(port=1080, target_port=1080, name="tcp-1080"), - ], - selector={"app.kubernetes.io/name": "mailcatcher"}, - ), - ) - v1.create_namespaced_service(namespace=namespace, body=service) - deadline = time.time() + 300 - while True: - if time.time() > deadline: - raise TimeoutError("timeout while waiting for mailcatcher pod") - try: - pod = v1.read_namespaced_pod(name="mailcatcher", namespace=namespace) - if pod.status.phase == "Running": - logger.info("mailcatcher running at %s", pod.status.pod_ip) - break - except kubernetes.client.ApiException: - pass - logger.info("waiting for mailcatcher pod") - time.sleep(1) - SmtpCredential = collections.namedtuple("SmtpCredential", "host port") - return SmtpCredential( - host=f"mailcatcher-service.{namespace}.svc.cluster.local", - port=1025, - ) diff --git a/tests/integration/flask/test_charm.py b/tests/integration/flask/test_charm.py index e030529..6019ea6 100644 --- a/tests/integration/flask/test_charm.py +++ b/tests/integration/flask/test_charm.py @@ -41,7 +41,6 @@ async def test_flask_is_up( @pytest.mark.parametrize( "update_config, timeout", [ - pytest.param({"webserver-timeout": 3}, 3, id="timeout=9"), pytest.param({"webserver-timeout": 7}, 7, id="timeout=7"), pytest.param({"webserver-timeout": 5}, 5, id="timeout=5"), pytest.param({"webserver-timeout": 3}, 3, id="timeout=3"), diff --git a/tests/integration/flask/test_integrations.py b/tests/integration/flask/test_integrations.py index 5d467e8..ff7b1e5 100644 --- a/tests/integration/flask/test_integrations.py +++ b/tests/integration/flask/test_integrations.py @@ -167,46 +167,3 @@ async def test_saml_integration( entity_id_url._replace(path="sso") ) assert env["SAML_SIGNING_CERTIFICATE"] in saml_helper.CERTIFICATE.replace("\n", "") - - -async def test_smtp_integration( - ops_test: OpsTest, - flask_app: Application, - model: Model, - get_unit_ips, - mailcatcher, -): - """ - arrange: build and deploy the flask charm. Create the s3 bucket. - act: Integrate the charm with the s3-integrator. - assert: the flask application should return in the endpoint /env - the correct S3 env variables. 
- """ - smtp_config = { - "auth_type": "none", - "domain": "example.com", - "host": mailcatcher.host, - "port": mailcatcher.port, - } - smtp_integrator_app = await model.deploy( - "smtp-integrator", - channel="latest/edge", - config=smtp_config, - ) - await model.wait_for_idle() - await model.add_relation(flask_app.name, f"{smtp_integrator_app.name}:smtp") - - await model.wait_for_idle( - idle_period=30, - apps=[flask_app.name, smtp_integrator_app.name], - status="active", - ) - - for unit_ip in await get_unit_ips(flask_app.name): - response = requests.get(f"http://{unit_ip}:8000/env", timeout=5) - assert response.status_code == 200 - env = response.json() - assert env["SMTP_HOST"] == smtp_config["host"] - assert env["SMTP_DOMAIN"] == smtp_config["domain"] - assert env["SMTP_PORT"] == smtp_config["port"] - assert env.get("SMTP_AUTH_TYPE") == None diff --git a/tox.ini b/tox.ini index af71542..8b4ae53 100644 --- a/tox.ini +++ b/tox.ini @@ -69,8 +69,6 @@ commands = --skip {toxinidir}/examples/fastapi/charm/lib \ --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/examples/flask/lib \ - --skip {toxinidir}/examples/fastapi/charm/lib \ - --skip {toxinidir}/examples/go/charm/lib \ --skip {toxinidir}/tests/integration/.mypy_cache # pflake8 wrapper supports config from pyproject.toml pflake8 {[vars]src_path} {[vars]legacy_src_path} From 3b77a71f4eca712c164d0f1e2967c040ed815bb2 Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Wed, 29 Jan 2025 14:03:29 +0300 Subject: [PATCH 67/70] Chore(reqs): Pinned Jinja2 version. --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3d372d7..def324f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,4 @@ cosl jsonschema >=4.23,<4.24 ops >= 2.6 pydantic==2.10.6 -Jinja2 +Jinja2==3.1.5 From 0e469b1f8f644b7bb5db4014c291188c52368cab Mon Sep 17 00:00:00 2001 From: Ali Ugur Date: Wed, 29 Jan 2025 14:04:13 +0300 Subject: [PATCH 68/70] Chore(state): Addressed comment about Tempo state and how it is initialized. 
--- src/paas_charm/charm.py | 11 +- src/paas_charm/charm_state.py | 360 ++++++++++++++------------- src/paas_charm/charm_utils.py | 2 +- tests/unit/flask/test_charm.py | 2 +- tests/unit/flask/test_charm_state.py | 10 +- 5 files changed, 207 insertions(+), 178 deletions(-) diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py index a1f323a..873f4be 100644 --- a/src/paas_charm/charm.py +++ b/src/paas_charm/charm.py @@ -15,7 +15,7 @@ from pydantic import BaseModel, ValidationError from paas_charm.app import App, WorkloadConfig -from paas_charm.charm_state import CharmState, TempoParameters +from paas_charm.charm_state import CharmState from paas_charm.charm_utils import block_if_invalid_config from paas_charm.database_migration import DatabaseMigration, DatabaseMigrationStatus from paas_charm.databases import make_database_requirers @@ -474,6 +474,13 @@ def _create_charm_state(self) -> CharmState: saml_relation_data = None if self._saml and (saml_data := self._saml.get_relation_data()): saml_relation_data = saml_data.to_relation_data() + + tempo_relation_data = None + if self._tracing and self._tracing.is_ready(): + tempo_relation_data = { + "service_name": self.app.name, + "endpoint": f"{self._tracing.get_endpoint(protocol="otlp_http")}", + } charm_config = {k: config_get_with_secret(self, k) for k in self.config.keys()} config = typing.cast( dict, @@ -492,7 +499,7 @@ def _create_charm_state(self) -> CharmState: s3_connection_info=self._s3.get_s3_connection_info() if self._s3 else None, saml_relation_data=saml_relation_data, rabbitmq_uri=self._rabbitmq.rabbitmq_uri() if self._rabbitmq else None, - tempo_parameters=TempoParameters.from_charm(name=self.app.name, tracing=self._tracing), + tempo_relation_data=tempo_relation_data, base_url=self._base_url, ) diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index 637bd14..bb9cf61 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -19,139 +19,6 @@ logger = logging.getLogger(__name__) -try: - # pylint: disable=ungrouped-imports - from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer -except ImportError: - logger.exception( - "Missing charm library, please run " - "`charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing`" - ) - - -class TempoParameters(BaseModel): - """Configuration for accessing Tempo service. - - Attributes: - endpoint: Tempo endpoint URL to send the traces. - service_name: Tempo service name for the workload. - """ - - endpoint: str | None = None - service_name: str | None = None - - @classmethod - def from_charm( - cls, *, name: str, tracing: TracingEndpointRequirer | None - ) -> "TempoParameters | None": - """Initialize a new instance of the TempoParameters class from the associated charm. - - Args: - name: Name of the tracing service. - tracing: The tracing integration object. - - Return: - The TempoParameters instance created by the provided charm if - Tempo is relation is ready. - """ - if tracing and tracing.is_ready(): - return cls( - endpoint=f"{tracing.get_endpoint(protocol='otlp_http')}", - service_name=name, - ) - return None - - -class S3Parameters(BaseModel): - """Configuration for accessing S3 bucket. - - Attributes: - access_key: AWS access key. - secret_key: AWS secret key. - region: The region to connect to the object storage. - storage_class: Storage Class for objects uploaded to the object storage. - bucket: The bucket name. - endpoint: The endpoint used to connect to the object storage. 
- path: The path inside the bucket to store objects. - s3_api_version: S3 protocol specific API signature. - s3_uri_style: The S3 protocol specific bucket path lookup type. Can be "path" or "host". - addressing_style: S3 protocol addressing style, can be "path" or "virtual". - attributes: The custom metadata (HTTP headers). - tls_ca_chain: The complete CA chain, which can be used for HTTPS validation. - """ - - access_key: str = Field(alias="access-key") - secret_key: str = Field(alias="secret-key") - region: Optional[str] = None - storage_class: Optional[str] = Field(alias="storage-class", default=None) - bucket: str - endpoint: Optional[str] = None - path: Optional[str] = None - s3_api_version: Optional[str] = Field(alias="s3-api-version", default=None) - s3_uri_style: Optional[str] = Field(alias="s3-uri-style", default=None) - tls_ca_chain: Optional[list[str]] = Field(alias="tls-ca-chain", default=None) - attributes: Optional[list[str]] = None - - @property - def addressing_style(self) -> Optional[str]: - """Translates s3_uri_style to AWS addressing_style.""" - if self.s3_uri_style == "host": - return "virtual" - # If None or "path", it does not change. - return self.s3_uri_style - - -class SamlParameters(BaseModel, extra=Extra.allow): - """Configuration for accessing SAML. - - Attributes: - entity_id: Entity Id of the SP. - metadata_url: URL for the metadata for the SP. - signing_certificate: Signing certificate for the SP. - single_sign_on_redirect_url: Sign on redirect URL for the SP. - """ - - entity_id: str - metadata_url: str - signing_certificate: str = Field(alias="x509certs") - single_sign_on_redirect_url: str = Field(alias="single_sign_on_service_redirect_url") - - @field_validator("signing_certificate") - @classmethod - def validate_signing_certificate_exists(cls, certs: str, _: ValidationInfo) -> str: - """Validate that at least a certificate exists in the list of certificates. - - It is a prerequisite that the fist certificate is the signing certificate, - otherwise this method would return a wrong certificate. - - Args: - certs: Original x509certs field - - Returns: - The validated signing certificate - - Raises: - ValueError: If there is no certificate. - """ - certificate = certs.split(",")[0] - if not certificate: - raise ValueError("Missing x509certs. There should be at least one certificate.") - return certificate - - -class ProxyConfig(BaseModel): - """Configuration for network access through proxy. - - Attributes: - http_proxy: The http proxy URL. - https_proxy: The https proxy URL. - no_proxy: Comma separated list of hostnames to bypass proxy. - """ - - http_proxy: str | None = Field(default=None, pattern="https?://.+") - https_proxy: str | None = Field(default=None, pattern="https?://.+") - no_proxy: typing.Optional[str] = None - # too-many-instance-attributes is okay since we use a factory function to construct the CharmState class CharmState: # pylint: disable=too-many-instance-attributes @@ -208,7 +75,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None = None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tempo_parameters: TempoParameters | None = None, + tempo_relation_data: dict[str, str] | None = None, base_url: str | None = None, ) -> "CharmState": """Initialize a new instance of the CharmState class from the associated charm. @@ -223,7 +90,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info: Connection info from S3 lib. 
saml_relation_data: Relation data from the SAML app. rabbitmq_uri: RabbitMQ uri. - tempo_parameters: The tracing uri provided by the Tempo coordinator charm + tempo_relation_data: The tracing uri provided by the Tempo coordinator charm and charm name. base_url: Base URL for the service. @@ -245,7 +112,7 @@ def from_charm( # pylint: disable=too-many-arguments s3_connection_info=s3_connection_info, saml_relation_data=saml_relation_data, rabbitmq_uri=rabbitmq_uri, - tempo_parameters=tempo_parameters, + tempo_relation_data=tempo_relation_data, ) return cls( framework=framework, @@ -338,10 +205,85 @@ class IntegrationsState: redis_uri: str | None = None databases_uris: dict[str, str] = field(default_factory=dict) - s3_parameters: S3Parameters | None = None - saml_parameters: SamlParameters | None = None + s3_parameters: "S3Parameters | None" = None + saml_parameters: "SamlParameters | None" = None rabbitmq_uri: str | None = None - tempo_parameters: TempoParameters | None = None + tempo_parameters: "TempoParameters | None" = None + + @classmethod + def generate_saml_relation_parameters( + cls, + saml_relation_data: typing.MutableMapping[str, str] | None, + parameter_type: type, + ) -> "SamlParameters | None": + """Generate SAML relation parameter class from relation data. + + Args: + saml_relation_data: Relation data. + parameter_type: Parameter type to use. + + Return: + Parameter instance created. + + Raises: + CharmConfigInvalidError: If some parameter in invalid. + """ + if saml_relation_data is not None: + try: + relation_parameter = parameter_type(**saml_relation_data) + except ValidationError as exc: + error_message = build_validation_error_message(exc) + raise CharmConfigInvalidError(f"Invalid configuration: {error_message}") from exc + else: + relation_parameter = None + return relation_parameter + + @classmethod + def generate_relation_parameters( + cls, + relation_data: dict[str, str] | None, + parameter_type: type, + ) -> "SamlParameters | S3Parameters | TempoParameters | None": + """Generate relation parameter class from relation data. + + Args: + relation_data: Relation data. + parameter_type: Parameter type to use. + + Return: + Parameter instance created. + + Raises: + CharmConfigInvalidError: If some parameter in invalid. + """ + if relation_data: + try: + relation_parameter = parameter_type(**relation_data) + except ValidationError as exc: + error_message = build_validation_error_message(exc) + raise CharmConfigInvalidError(f"Invalid configuration: {error_message}") from exc + else: + relation_parameter = None + return relation_parameter + + @classmethod + def _collect_relation_parameters( + cls, + s3_connection_info: dict[str, str] | None, + saml_relation_data: typing.MutableMapping[str, str] | None = None, + tempo_relation_data: dict[str, str] | None = None, + ) -> typing.Generator: + """Collect relation parameter classes from relation data. + + Args: + s3_connection_info: S3 relation data. + saml_relation_data: SAML relation data. + tempo_relation_data: Tempo relation data. + + """ + yield cls.generate_relation_parameters(s3_connection_info, S3Parameters) + yield cls.generate_saml_relation_parameters(saml_relation_data, SamlParameters) + yield cls.generate_relation_parameters(tempo_relation_data, TempoParameters) # This dataclass combines all the integrations, so it is reasonable that they stay together. 
@classmethod @@ -353,7 +295,7 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: dict[str, str] | None, saml_relation_data: typing.MutableMapping[str, str] | None = None, rabbitmq_uri: str | None = None, - tempo_parameters: TempoParameters | None = None, + tempo_relation_data: dict[str, str] | None = None, ) -> "IntegrationsState": """Initialize a new instance of the IntegrationsState class. @@ -365,40 +307,17 @@ def build( # pylint: disable=too-many-arguments s3_connection_info: S3 connection info from S3 lib. saml_relation_data: Saml relation data from saml lib. rabbitmq_uri: RabbitMQ uri. - tempo_parameters: The tracing uri provided by the Tempo coordinator charm + tempo_relation_data: The tracing uri provided by the Tempo coordinator charm and charm name. Return: The IntegrationsState instance created. - - Raises: - CharmConfigInvalidError: If some parameter in invalid. """ - if s3_connection_info: - try: - # s3_connection_info is not really a Dict[str, str] as stated in - # charms.data_platform_libs.v0.s3. It is really a - # Dict[str, str | list[str]]. - # Ignoring as mypy does not work correctly with that information. - s3_parameters = S3Parameters(**s3_connection_info) # type: ignore[arg-type] - except ValidationError as exc: - error_message = build_validation_error_message(exc) - raise CharmConfigInvalidError( - f"Invalid S3 configuration: {error_message}" - ) from exc - else: - s3_parameters = None - - if saml_relation_data is not None: - try: - saml_parameters = SamlParameters(**saml_relation_data) - except ValidationError as exc: - error_message = build_validation_error_message(exc) - raise CharmConfigInvalidError( - f"Invalid Saml configuration: {error_message}" - ) from exc - else: - saml_parameters = None + s3_parameters, saml_parameters, tempo_parameters = list( + cls._collect_relation_parameters( + s3_connection_info, saml_relation_data, tempo_relation_data + ) + ) # Workaround as the Redis library temporarily sends the port # as None while the integration is being created. @@ -416,3 +335,106 @@ def build( # pylint: disable=too-many-arguments rabbitmq_uri=rabbitmq_uri, tempo_parameters=tempo_parameters, ) + + +class TempoParameters(BaseModel): + """Configuration for accessing Tempo service. + + Attributes: + endpoint: Tempo endpoint URL to send the traces. + service_name: Tempo service name for the workload. + """ + + endpoint: str | None = None + service_name: str | None = None + + +class S3Parameters(BaseModel): + """Configuration for accessing S3 bucket. + + Attributes: + access_key: AWS access key. + secret_key: AWS secret key. + region: The region to connect to the object storage. + storage_class: Storage Class for objects uploaded to the object storage. + bucket: The bucket name. + endpoint: The endpoint used to connect to the object storage. + path: The path inside the bucket to store objects. + s3_api_version: S3 protocol specific API signature. + s3_uri_style: The S3 protocol specific bucket path lookup type. Can be "path" or "host". + addressing_style: S3 protocol addressing style, can be "path" or "virtual". + attributes: The custom metadata (HTTP headers). + tls_ca_chain: The complete CA chain, which can be used for HTTPS validation. 
+ """ + + access_key: str = Field(alias="access-key") + secret_key: str = Field(alias="secret-key") + region: Optional[str] = None + storage_class: Optional[str] = Field(alias="storage-class", default=None) + bucket: str + endpoint: Optional[str] = None + path: Optional[str] = None + s3_api_version: Optional[str] = Field(alias="s3-api-version", default=None) + s3_uri_style: Optional[str] = Field(alias="s3-uri-style", default=None) + tls_ca_chain: Optional[list[str]] = Field(alias="tls-ca-chain", default=None) + attributes: Optional[list[str]] = None + + @property + def addressing_style(self) -> Optional[str]: + """Translates s3_uri_style to AWS addressing_style.""" + if self.s3_uri_style == "host": + return "virtual" + # If None or "path", it does not change. + return self.s3_uri_style + + +class SamlParameters(BaseModel, extra=Extra.allow): + """Configuration for accessing SAML. + + Attributes: + entity_id: Entity Id of the SP. + metadata_url: URL for the metadata for the SP. + signing_certificate: Signing certificate for the SP. + single_sign_on_redirect_url: Sign on redirect URL for the SP. + """ + + entity_id: str + metadata_url: str + signing_certificate: str = Field(alias="x509certs") + single_sign_on_redirect_url: str = Field(alias="single_sign_on_service_redirect_url") + + @field_validator("signing_certificate") + @classmethod + def validate_signing_certificate_exists(cls, certs: str, _: ValidationInfo) -> str: + """Validate that at least a certificate exists in the list of certificates. + + It is a prerequisite that the fist certificate is the signing certificate, + otherwise this method would return a wrong certificate. + + Args: + certs: Original x509certs field + + Returns: + The validated signing certificate + + Raises: + ValueError: If there is no certificate. + """ + certificate = certs.split(",")[0] + if not certificate: + raise ValueError("Missing x509certs. There should be at least one certificate.") + return certificate + + +class ProxyConfig(BaseModel): + """Configuration for network access through proxy. + + Attributes: + http_proxy: The http proxy URL. + https_proxy: The https proxy URL. + no_proxy: Comma separated list of hostnames to bypass proxy. + """ + + http_proxy: str | None = Field(default=None, pattern="https?://.+") + https_proxy: str | None = Field(default=None, pattern="https?://.+") + no_proxy: typing.Optional[str] = None diff --git a/src/paas_charm/charm_utils.py b/src/paas_charm/charm_utils.py index f95402b..ac3a918 100644 --- a/src/paas_charm/charm_utils.py +++ b/src/paas_charm/charm_utils.py @@ -33,7 +33,7 @@ def update_app_and_unit_status(self, status: ops.StatusBase) -> None: def block_if_invalid_config( - method: typing.Callable[[C, E], None] + method: typing.Callable[[C, E], None], ) -> typing.Callable[[C, E], None]: """Create a decorator that puts the charm in blocked state if the config is wrong. 
diff --git a/tests/unit/flask/test_charm.py b/tests/unit/flask/test_charm.py
index bd72614..7cbd282 100644
--- a/tests/unit/flask/test_charm.py
+++ b/tests/unit/flask/test_charm.py
@@ -361,4 +361,4 @@ def test_invalid_integration(harness: Harness):
     harness.add_relation("s3", "s3-integration", app_data=s3_relation_data)
     harness.begin_with_initial_hooks()
     assert isinstance(harness.model.unit.status, ops.BlockedStatus)
-    assert "S3" in str(harness.model.unit.status.message)
+    assert "Invalid configuration" in str(harness.model.unit.status.message)
diff --git a/tests/unit/flask/test_charm_state.py b/tests/unit/flask/test_charm_state.py
index c59a6c7..cc84199 100644
--- a/tests/unit/flask/test_charm_state.py
+++ b/tests/unit/flask/test_charm_state.py
@@ -152,7 +152,7 @@ def test_s3_integration_raises():
             database_requirers={},
             s3_connection_info={"bucket": "bucket"},
         )
-    assert "S3" in str(exc)
+    assert "Invalid configuration" in str(exc)
 
 
 @pytest.mark.parametrize(
@@ -211,7 +211,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             {},
-            ["Invalid Saml"],
+            ["Invalid configuration"],
             id="Empty relation data",
         )
     )
@@ -220,7 +220,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             saml_app_relation_data,
-            ["Invalid Saml", "single_sign_on_service_redirect_url"],
+            ["Invalid configuration", "single_sign_on_service_redirect_url"],
             id="Missing single_sign_on_service_redirect_url",
         )
     )
@@ -229,7 +229,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             saml_app_relation_data,
-            ["Invalid Saml", "x509certs"],
+            ["Invalid configuration", "x509certs"],
             id="Missing x509certs",
         )
     )
@@ -238,7 +238,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             saml_app_relation_data,
-            ["Invalid Saml", "x509certs"],
+            ["Invalid configuration", "x509certs"],
             id="Empty x509certs",
         )
     )

From d2899a05d367c8dc9185e2b34a0d8b9cfa645fec Mon Sep 17 00:00:00 2001
From: Ali Ugur
Date: Wed, 29 Jan 2025 14:16:29 +0300
Subject: [PATCH 69/70] Chore(tracing): Fix string format

---
 src/paas_charm/charm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/paas_charm/charm.py b/src/paas_charm/charm.py
index 873f4be..4baa4dc 100644
--- a/src/paas_charm/charm.py
+++ b/src/paas_charm/charm.py
@@ -479,7 +479,7 @@ def _create_charm_state(self) -> CharmState:
         if self._tracing and self._tracing.is_ready():
             tempo_relation_data = {
                 "service_name": self.app.name,
-                "endpoint": f"{self._tracing.get_endpoint(protocol="otlp_http")}",
+                "endpoint": f"{self._tracing.get_endpoint(protocol='otlp_http')}",
             }
         charm_config = {k: config_get_with_secret(self, k) for k in self.config.keys()}
         config = typing.cast(

From e877780e9a674799d12aabd6373878e5339b2d1c Mon Sep 17 00:00:00 2001
From: Ali Ugur
Date: Thu, 30 Jan 2025 13:14:56 +0300
Subject: [PATCH 70/70] Chore(): Fix relation parameter generation functions
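
Move the relation parameter generation helpers out of IntegrationsState
into module-level functions, and include the failing parameter class in
the validation error so that, for example, a bad S3 integration is now
reported as "Invalid S3Parameters configuration: ...". The unit tests are
updated to expect the class-qualified messages.

Illustrative sketch of the intended call pattern (import paths assumed;
the relation data below is made up):

    from paas_charm.charm_state import S3Parameters, generate_relation_parameters
    from paas_charm.exceptions import CharmConfigInvalidError  # import path assumed

    try:
        # Build a typed parameter object from raw relation data.
        s3 = generate_relation_parameters(
            {"access-key": "key", "secret-key": "secret", "bucket": "paas-bucket"},
            S3Parameters,
        )
    except CharmConfigInvalidError as exc:
        # The error message now names the parameter class that failed validation.
        print(str(exc))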
--- src/paas_charm/charm_state.py | 155 ++++++++++++++------------- tests/unit/flask/test_charm.py | 2 +- tests/unit/flask/test_charm_state.py | 11 +- 3 files changed, 86 insertions(+), 82 deletions(-) diff --git a/src/paas_charm/charm_state.py b/src/paas_charm/charm_state.py index bb9cf61..8c38809 100644 --- a/src/paas_charm/charm_state.py +++ b/src/paas_charm/charm_state.py @@ -210,81 +210,6 @@ class IntegrationsState: rabbitmq_uri: str | None = None tempo_parameters: "TempoParameters | None" = None - @classmethod - def generate_saml_relation_parameters( - cls, - saml_relation_data: typing.MutableMapping[str, str] | None, - parameter_type: type, - ) -> "SamlParameters | None": - """Generate SAML relation parameter class from relation data. - - Args: - saml_relation_data: Relation data. - parameter_type: Parameter type to use. - - Return: - Parameter instance created. - - Raises: - CharmConfigInvalidError: If some parameter in invalid. - """ - if saml_relation_data is not None: - try: - relation_parameter = parameter_type(**saml_relation_data) - except ValidationError as exc: - error_message = build_validation_error_message(exc) - raise CharmConfigInvalidError(f"Invalid configuration: {error_message}") from exc - else: - relation_parameter = None - return relation_parameter - - @classmethod - def generate_relation_parameters( - cls, - relation_data: dict[str, str] | None, - parameter_type: type, - ) -> "SamlParameters | S3Parameters | TempoParameters | None": - """Generate relation parameter class from relation data. - - Args: - relation_data: Relation data. - parameter_type: Parameter type to use. - - Return: - Parameter instance created. - - Raises: - CharmConfigInvalidError: If some parameter in invalid. - """ - if relation_data: - try: - relation_parameter = parameter_type(**relation_data) - except ValidationError as exc: - error_message = build_validation_error_message(exc) - raise CharmConfigInvalidError(f"Invalid configuration: {error_message}") from exc - else: - relation_parameter = None - return relation_parameter - - @classmethod - def _collect_relation_parameters( - cls, - s3_connection_info: dict[str, str] | None, - saml_relation_data: typing.MutableMapping[str, str] | None = None, - tempo_relation_data: dict[str, str] | None = None, - ) -> typing.Generator: - """Collect relation parameter classes from relation data. - - Args: - s3_connection_info: S3 relation data. - saml_relation_data: SAML relation data. - tempo_relation_data: Tempo relation data. - - """ - yield cls.generate_relation_parameters(s3_connection_info, S3Parameters) - yield cls.generate_saml_relation_parameters(saml_relation_data, SamlParameters) - yield cls.generate_relation_parameters(tempo_relation_data, TempoParameters) - # This dataclass combines all the integrations, so it is reasonable that they stay together. @classmethod def build( # pylint: disable=too-many-arguments @@ -314,7 +239,7 @@ def build( # pylint: disable=too-many-arguments The IntegrationsState instance created. 
""" s3_parameters, saml_parameters, tempo_parameters = list( - cls._collect_relation_parameters( + collect_relation_parameters( s3_connection_info, saml_relation_data, tempo_relation_data ) ) @@ -438,3 +363,81 @@ class ProxyConfig(BaseModel): http_proxy: str | None = Field(default=None, pattern="https?://.+") https_proxy: str | None = Field(default=None, pattern="https?://.+") no_proxy: typing.Optional[str] = None + + +def generate_saml_relation_parameters( + saml_relation_data: typing.MutableMapping[str, str] | None, + parameter_type: type, +) -> "SamlParameters | None": + """Generate SAML relation parameter class from relation data. + + Args: + saml_relation_data: Relation data. + parameter_type: Parameter type to use. + + Return: + Parameter instance created. + + Raises: + CharmConfigInvalidError: If some parameter in invalid. + """ + if saml_relation_data is None: + return None + try: + return parameter_type(**saml_relation_data) + except ValidationError as exc: + error_message = build_validation_error_message(exc) + raise CharmConfigInvalidError( + f"Invalid {parameter_type.__name__} configuration: {error_message}" + ) from exc + + +def generate_relation_parameters( + relation_data: dict[str, str] | None, + parameter_type: type, +) -> "SamlParameters | S3Parameters | TempoParameters | None": + """Generate relation parameter class from relation data. + + Args: + relation_data: Relation data. + parameter_type: Parameter type to use. + + Return: + Parameter instance created. + + Raises: + CharmConfigInvalidError: If some parameter in invalid. + """ + if not relation_data: + return None + try: + return parameter_type(**relation_data) + except ValidationError as exc: + error_message = build_validation_error_message(exc) + raise CharmConfigInvalidError( + f"Invalid {parameter_type.__name__} configuration: {error_message}" + ) from exc + + +def collect_relation_parameters( + s3_connection_info: dict[str, str] | None, + saml_relation_data: typing.MutableMapping[str, str] | None = None, + tempo_relation_data: dict[str, str] | None = None, +) -> typing.Generator: + """Collect relation parameter classes from relation data. + + Args: + s3_connection_info: S3 relation data. + saml_relation_data: SAML relation data. + tempo_relation_data: Tempo relation data. + + Yields: + s3_parameters: S3 parameters. + saml_parameters: SAML parameters. + tempo_parameters: Tempo parameters. 
+
+
+    """
+    yield generate_relation_parameters(s3_connection_info, S3Parameters)
+    yield generate_saml_relation_parameters(saml_relation_data, SamlParameters)
+    yield generate_relation_parameters(tempo_relation_data, TempoParameters)
diff --git a/tests/unit/flask/test_charm.py b/tests/unit/flask/test_charm.py
index 7cbd282..d34137b 100644
--- a/tests/unit/flask/test_charm.py
+++ b/tests/unit/flask/test_charm.py
@@ -361,4 +361,4 @@ def test_invalid_integration(harness: Harness):
     harness.add_relation("s3", "s3-integration", app_data=s3_relation_data)
     harness.begin_with_initial_hooks()
     assert isinstance(harness.model.unit.status, ops.BlockedStatus)
-    assert "Invalid configuration" in str(harness.model.unit.status.message)
+    assert "Invalid S3Parameters configuration" in str(harness.model.unit.status.message)
diff --git a/tests/unit/flask/test_charm_state.py b/tests/unit/flask/test_charm_state.py
index cc84199..d5b701a 100644
--- a/tests/unit/flask/test_charm_state.py
+++ b/tests/unit/flask/test_charm_state.py
@@ -152,7 +152,7 @@ def test_s3_integration_raises():
            database_requirers={},
            s3_connection_info={"bucket": "bucket"},
        )
-    assert "Invalid configuration" in str(exc)
+    assert "Invalid S3Parameters configuration" in str(exc)
 
 
 @pytest.mark.parametrize(
@@ -211,7 +211,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             {},
-            ["Invalid configuration"],
+            ["Invalid SamlParameters configuration"],
             id="Empty relation data",
         )
     )
@@ -220,7 +220,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             saml_app_relation_data,
-            ["Invalid configuration", "single_sign_on_service_redirect_url"],
+            ["Invalid SamlParameters configuration", "single_sign_on_service_redirect_url"],
             id="Missing single_sign_on_service_redirect_url",
         )
     )
@@ -229,7 +229,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             saml_app_relation_data,
-            ["Invalid configuration", "x509certs"],
+            ["Invalid SamlParameters configuration", "x509certs"],
             id="Missing x509certs",
         )
     )
@@ -238,7 +238,7 @@ def _test_saml_integration_invalid_parameters():
     params.append(
         pytest.param(
             saml_app_relation_data,
-            ["Invalid configuration", "x509certs"],
+            ["Invalid SamlParameters configuration", "x509certs"],
             id="Empty x509certs",
         )
     )