From 43befd0f280d32ac55f07f9cf47afb7279aa99a6 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Thu, 4 Sep 2025 11:19:52 +0200 Subject: [PATCH 01/34] feat: Data Interfaces V1 --- .../data_platform_libs/v1/data_interfaces.py | 2610 +++++++++++++++++ tests/v1/unit/test_data_interfaces.py | 1870 ++++++++++++ 2 files changed, 4480 insertions(+) create mode 100644 lib/charms/data_platform_libs/v1/data_interfaces.py create mode 100644 tests/v1/unit/test_data_interfaces.py diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py new file mode 100644 index 00000000..dd16934f --- /dev/null +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -0,0 +1,2610 @@ +# Copyright 2025 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. 
+ +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. + + +#### Requirer Charm + +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +```python +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ClientCharm(CharmBase): + # Database charm that accepts connections from application charms. + def __init__(self, *args) -> None: + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + RequirerCommonModel( + entity_type="USER", + ) + ] + self.database = ResourceRequirerEventHandler( + self,"database", requests, response_model=ResourceProviderModel + ) + self.framework.observe(self.database.on.resource_created, self._on_resource_created) + self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) + + def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new database is created. 
+ relation_id = event.relation.id + response = event.response # This is the response model + + username = event.response.username + password = event.response.password + ... + + def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new entity is created. + ... + +Compared to V1, this library makes heavy use of pydantic models, and allows for +multiple requests, specified as a list. +On the Requirer side, each response will trigger one custom event for that response. +This way, it allows for more strategic events to be emitted according to the request. + +As show above, the library provides some custom events to handle specific situations, which are listed below: +- resource_created: event emitted when the requested database is created. +- resource_entity_created: event emitted when the requested entity is created. +- endpoints_changed: event emitted when the read/write endpoints of the database have changed. +- read_only_endpoints_changed: event emitted when the read-only endpoints of the database + have changed. Event is not triggered if read/write endpoints changed too. + +If it is needed to connect multiple database clusters to the same relation endpoint +the application charm can implement the same code as if it would connect to only +one database cluster (like the above code example). + +To differentiate multiple clusters connected to the same relation endpoint +the application charm can use the name of the remote application: + +```python + +def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Get the remote app name of the cluster that triggered this event + cluster = event.relation.app.name +``` + +It is also possible to provide an alias for each different database cluster/relation. + +So, it is possible to differentiate the clusters in two ways. +The first is to use the remote application name, i.e., `event.relation.app.name`, as above. 
+ +The second way is to use different event handlers to handle each cluster events. +The implementation would be something like the following code: + +```python + +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. + + def __init__(self, *args): + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + ] + # Define the cluster aliases and one handler for each cluster database created event. + self.database = ResourceRequirerEventHandler( + self, + relation_name="database" + relations_aliases = ["cluster1", "cluster2"], + requests= + ) + self.framework.observe( + self.database.on.cluster1_resource_created, self._on_cluster1_resource_created + ) + self.framework.observe( + self.database.on.cluster2_resource_created, self._on_cluster2_resource_created + ) + + def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: + # Handle the created database on the cluster named cluster1 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.respones.username, + event.response.password, + event.response.endpoints, + ) + ... + + def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: + # Handle the created database on the cluster named cluster2 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.response.username, + event.response.password, + event.response.endpoints, + ) + ... 
+``` + +### Provider Charm + +Following an example of using the ResourceRequestedEvent, in the context of the +database charm code: + +```python +from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + # Charm events defined in the database provides charm library. + self.provided_database = DatabaseProvides(self, relation_name="database") + self.framework.observe(self.provided_database.on.database_requested, + self._on_database_requested) + # Database generic helper + self.database = DatabaseHelper() + + def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: + # Handle the event triggered by a new database requested in the relation + # Retrieve the database name using the charm library. + db_name = event.database + # generate a new user credential + username = self.database.generate_user() + password = self.database.generate_password() + # set the credentials for the relation + self.provided_database.set_credentials(event.relation.id, username, password) + # set other variables for the relation event.set_tls("False") +``` + +As shown above, the library provides a custom event (database_requested) to handle +the situation when an application charm requests a new database to be created. +It's preferred to subscribe to this event instead of relation changed event to avoid +creating a new database when other information other than a database name is +exchanged in the relation databag. 
+ +""" + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections import namedtuple +from datetime import datetime +from enum import Enum +from typing import ( + Annotated, + Any, + ClassVar, + Generic, + Literal, + NewType, + TypeAlias, + TypeVar, + final, + overload, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SecretStr, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) +from pydantic.types import _SecretBase, _SecretField +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeAliasType, override + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 0 + +PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + +RESOURCE_ALIASES = [ + "database", + "subject", + "topic", + "index", + "plugin-url", +] + +SECRET_PREFIX = "secret-" + + 
+############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + + +def gen_salt() -> str: + """Generates a consistent salt.""" + return "".join(random.choices(string.ascii_letters + string.digits, k=16)) + + +def gen_hash(resource_name: str, salt: str) -> str: + """Generates a consistent hash based on the resource name and salt.""" + hasher = hashlib.sha256() + hasher.update(f"{resource_name}:{salt}".encode()) + return hasher.hexdigest()[:16] + + +def ensure_leader_for_app(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self._local_app and not self._local_unit.is_leader(): + logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def get_encoded_dict( + relation: Relation, member: Unit | Application, field: str +) -> dict[str, Any] | None: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + 
logger.error("Unexpected datatype for %s instead of dict.", str(data)) + + +Diff = namedtuple("Diff", ["added", "changed", "deleted"]) +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. + +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + + +def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: + """Retrieves the diff of the data in the relation changed databag for v1. + + Args: + old_data: dictionary of the stored data before the event. + new_data: dictionary of the received data to compute the diff. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + old_data = old_data or {} + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + # Return the diff with all possible changes. 
+ return Diff(added, changed, deleted) + + +def resource_added(diff: Diff) -> bool: + """Ensures that one of the aliased resources has been added.""" + return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) + + +def store_new_data( + relation: Relation, + component: Unit | Application, + new_data: dict[str, str], + short_uuid: str | None = None, +): + """Stores the new data in the databag for diff computation.""" + # First, the case for V0 + if not short_uuid: + relation.data[component].update({"data": json.dumps(new_data)}) + # Then the case for V1, where we have a ShortUUID + else: + data = json.loads(relation.data[component].get("data", "{}")) + if not isinstance(data, dict): + raise ValueError + newest_data = copy.deepcopy(data) + newest_data[short_uuid] = new_data + relation.data[component].update({"data": json.dumps(newest_data)}) + + +############################################################################## +# Helper classes +############################################################################## + +SecretGroup = NewType("SecretGroup", str) + + +SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) + + +class SecretBool(_SecretField[bool]): + """Class for booleans as secrets.""" + + _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() + _error_kind: ClassVar[str] = "bool_type" + + def _display(self) -> str: + return "****" + + +OptionalSecretStr: TypeAlias = SecretStr | None +OptionalSecretBool: TypeAlias = SecretBool | None + +OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) + +UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] +TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] +TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] +MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] +ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] +EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] + + +class Scope(Enum): + """Peer relations scope.""" + + APP = "app" + UNIT = "unit" + + +class CachedSecret: + """Locally cache a secret. + + The data structure is precisely reusing/simulating as in the actual Secret Storage + """ + + KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + + def __init__( + self, + model: Model, + component: Application | Unit, + label: str, + secret_uri: str | None = None, + ): + self._secret_meta = None + self._secret_content = {} + self._secret_uri = secret_uri + self.label = label + self._model = model + self.component = component + self.current_label = None + + @property + def meta(self) -> Secret | None: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + logger.info(f"Secret with label {self.label} not found") + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + + ########################################################################## + # Public functions + ########################################################################## + + def add_secret( + self, + content: dict[str, str], + relation: Relation | None = None, + label: str | None = None, + ) -> Secret: + """Create a new secret.""" + if self._secret_uri: + raise SecretAlreadyExistsError( + "Secret is already defined with uri %s", self._secret_uri + ) + + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if 
relation and relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) + self._secret_uri = secret.id + self._secret_meta = secret + return self._secret_meta + + def get_content(self) -> dict[str, str]: + """Getting cached secret content.""" + if not self._secret_content: + if self.meta: + try: + self._secret_content = self.meta.get_content(refresh=True) + except (ValueError, ModelError) as err: + # https://bugs.launchpad.net/juju/+bug/2042596 + # Only triggered when 'refresh' is set + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + ): + raise + # Due to: ValueError: Secret owner cannot use refresh=True + self._secret_content = self.meta.get_content() + return self._secret_content + + def set_content(self, content: dict[str, str]) -> None: + """Setting cached secret content.""" + if not self.meta: + return + + if content == self.get_content(): + return + + if content: + self.meta.set_content(content) + self._secret_content = content + else: + self.meta.remove_all_revisions() + + def get_info(self) -> SecretInfo | None: + """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" + if self.meta: + return self.meta.get_info() + + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + + +class SecretCache: + """A data structure storing CachedSecret objects.""" + + def __init__(self, model: Model, component: Application | Unit): + self._model = model + self.component = component + self._secrets: dict[str, CachedSecret] = {} + + def get(self, label: str, uri: str | None = None) -> CachedSecret | None: + """Getting a secret from Juju Secret store or cache.""" + if 
not self._secrets.get(label): + secret = CachedSecret(self._model, self.component, label, uri) + if secret.meta: + self._secrets[label] = secret + return self._secrets.get(label) + + def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: + """Adding a secret to Juju Secret.""" + if self._secrets.get(label): + raise SecretAlreadyExistsError(f"Secret {label} already exists") + + secret = CachedSecret(self._model, self.component, label) + secret.add_secret(content, relation) + self._secrets[label] = secret + return self._secrets[label] + + def remove(self, label: str) -> None: + """Remove a secret from the cache.""" + if secret := self.get(label): + try: + secret.remove() + self._secrets.pop(label) + except (SecretsUnavailableError, KeyError): + pass + else: + return + logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) + + +############################################################################## +# Models classes +############################################################################## + + +class PeerModel(BaseModel): + """Common Model for all peer relations.""" + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = 
field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + if not secret: + logger.info(f"No secret for group {secret_group}") + continue + + value = secret.get_content().get(aliased_field) + + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + setattr(self, field, value) + + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + if (value := getattr(self, field)) is None: + continue + + aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + else: + secret = repository.add_secret( + aliased_field, + actual_value, + secret_group, + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + + return handler(self) + + +class CommonModel(BaseModel): + """Common Model for both requirer and provider. 
+ + request_id stores the request identifier for easier access. + resource is the requested resource. + """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + ) + + resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") + request_id: str | None = Field(default=None) + salt: str = Field( + description="This salt is used to create unique hashes even when other fields map 1-1", + default_factory=gen_salt, + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + + if not secret_uri: + continue + + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + if not secret: + logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") + continue + + value = secret.get_content().get(aliased_field) + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + setattr(self, field, value) + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: 
SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + # Backward compatibility for v0 regarding secrets. + if info.context.get("version") == "v0": + short_uuid = None + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + if (value := getattr(self, field)) is None: + continue + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + else: + secret = repository.add_secret( + aliased_field, actual_value, secret_group, short_uuid + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + setattr(self, secret_field, secret.meta.id) + + return handler(self) + + @classmethod + def _get_secret_field(cls, field: str) -> SecretGroup | None: + """Checks if the field is a secret uri or not.""" + if not field.startswith(SECRET_PREFIX): + return 
None + + value = field.split("-")[1] + if info := cls.__pydantic_fields__.get(field.replace("-", "_")): + if info.annotation == SecretString: + return SecretGroup(value) + return None + + +class EntityPermissionModel(BaseModel): + """Entity Permissions Model.""" + + resource_name: str + resource_type: str + privileges: list + + +class RequirerCommonModel(CommonModel): + """Requirer side of the request model. + + extra_user_roles is used to request more roles for that user. + external_node_connectivity is used to indicate that the URI should be made for external clients when True + """ + + extra_user_roles: str | None = Field(default=None) + extra_group_roles: str | None = Field(default=None) + external_node_connectivity: bool = Field(default=False) + entity_type: Literal["USER", "GROUP"] | None = Field(default=None) + entity_permissions: list[EntityPermissionModel] | None = Field(default=None) + secret_mtls: SecretString | None = Field(default=None) + mtls_cert: MtlsSecretStr = Field(default=None) + + @model_validator(mode="after") + def validate_fields(self): + """Validates that no inconsistent request is being sent.""" + if self.entity_type and self.entity_type not in ["USER", "GROUP"]: + raise ValueError("Invalid entity-type. Possible values are USER and GROUP") + + if self.entity_type == "USER" and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == "GROUP" and self.extra_user_roles: + raise ValueError("Inconsistent entity information. Use extra_group_roles instead") + + return self + + +class ProviderCommonModel(CommonModel): + """Serialized fields added to the databag. + + endpoints stores the endpoints exposed to that client. + secret_user is a secret URI mapping to the user credentials + secret_tls is a secret URI mapping to the TLS certificate + secret_extra is a secret URI for all additional secrets requested. 
+ """ + + endpoints: str | None = Field(default=None) + read_only_endpoints: str | None = Field(default=None) + secret_user: SecretString | None = Field(default=None) + secret_tls: SecretString | None = Field(default=None) + secret_extra: SecretString | None = Field(default=None) + secret_entity: SecretString | None = Field(default=None) + + +class ResourceProviderModel(ProviderCommonModel): + """Extended model including the deserialized fields.""" + + username: UserSecretStr = Field(default=None) + password: UserSecretStr = Field(default=None) + uris: UserSecretStr = Field(default=None) + read_only_uris: UserSecretStr = Field(default=None) + tls: TlsSecretBool = Field(default=None) + tls_ca: TlsSecretStr = Field(default=None) + entity_name: EntitySecretStr = Field(default=None) + entity_password: EntitySecretStr = Field(default=None) + version: str | None = Field(default=None) + + +class RequirerDataContractV0(RequirerCommonModel): + """Backward compatibility.""" + + version: Literal["v0"] = Field(default="v0") + + original_field: str = Field(exclude=True, default="") + + @model_validator(mode="before") + @classmethod + def ensure_original_field(cls, data: Any): + """Ensures that we keep the original field.""" + if isinstance(data, dict): + for alias in RESOURCE_ALIASES: + if data.get(alias) is not None: + data["original_field"] = alias + break + else: + for alias in RESOURCE_ALIASES: + if getattr(data, alias) is not None: + data.original_field = alias + return data + + +TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) +TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) + + +class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): + """The new Data Contract.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TRequirerCommonModel] + + +def discriminate_on_version(payload: Any) -> str: + """Use the version to discriminate.""" + if isinstance(payload, dict): + 
return payload.get("version", "v0") + return getattr(payload, "version", "v0") + + +RequirerDataContractType = Annotated[ + Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], + Discriminator(discriminate_on_version), +] + + +RequirerDataContract = TypeAdapter(RequirerDataContractType) + + +class DataContractV0(ResourceProviderModel): + """The Data contract of the response, for V0.""" + + +class DataContractV1(BaseModel, Generic[TResourceProviderModel]): + """The Data contract of the response, for V1.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TResourceProviderModel] = Field(default_factory=list) + + +DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) + + +TCommonModel = TypeVar("TCommonModel", bound=CommonModel) + + +def is_topic_value_acceptable(value: str | None): + """Check whether the given Kafka topic value is acceptable.""" + if value and "*" in value[:3]: + raise ValueError(f"Error on topic '{value}',, unacceptable value.") + + +class KafkaRequestModel(RequirerCommonModel): + """Specialised model for Kafka.""" + + consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( + Field(default=None) + ) + + +class KafkaResponseModel(ResourceProviderModel): + """Kafka response model.""" + + consumer_group_prefix: ExtraSecretStr = Field(default=None) + zookeeper_uris: ExtraSecretStr = Field(default=None) + + +############################################################################## +# AbstractRepository class +############################################################################## + + +class AbstractRepository(ABC): + """Abstract repository interface.""" + + @abstractmethod + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + ... 
+ + @abstractmethod + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def get_field(self, field: str) -> str | None: + """Gets the value for one field.""" + ... + + @abstractmethod + def get_fields(self, *fields: str) -> dict[str, str | None]: + """Gets the values for all provided fields.""" + ... + + @abstractmethod + def write_field(self, field: str, value: Any) -> None: + """Writes the value in the field, without any secret support.""" + ... + + @abstractmethod + def write_fields(self, mapping: dict[str, Any]) -> None: + """Writes the values of mapping in the fields without any secret support (keys of mapping).""" + ... + + def write_secret_field( + self, field: str, value: Any, group: SecretGroup, uri_to_databag: bool = False + ) -> CachedSecret | None: + """Writes a secret field.""" + ... + + @abstractmethod + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def delete_field(self, field: str) -> None: + """Deletes a field.""" + ... + + @abstractmethod + def delete_fields(self, *fields: str) -> None: + """Deletes all the provided fields.""" + ... + + @abstractmethod + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + """Delete a field stored in a secret group.""" + ... + + @abstractmethod + def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: + """Registers a secret using the repository.""" + ... + + @abstractmethod + def get_data(self) -> dict[str, Any] | None: + """Gets the whole data.""" + ... 
+ + @abstractmethod + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Builds a secret field.""" + + +class OpsRepository(AbstractRepository): + """Implementation for ops repositories, with some methods left out.""" + + SECRET_FIELD_NAME: str + + IGNORES_GROUPS: list[SecretGroup] = [] + + uri_to_databag: bool = True + + def __init__( + self, + model: Model, + relation: Relation | None, + component: Unit | Application, + ): + self._local_app = model.app + self._local_unit = model.unit + self.relation = relation + self.component = component + self.model = model + self.secrets = SecretCache(model, component) + + @abstractmethod + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group mapping for secrets within a relation context.""" + ... + + @override + def get_data(self) -> dict[str, Any] | None: + ret: dict[str, Any] = {} + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + for key, value in self.relation.data[self.component].items(): + try: + ret[key] = json.loads(value) + except json.JSONDecodeError: + ret[key] = value + + return ret + + @override + @ensure_leader_for_app + def get_field( + self, + field: str, + ) -> str | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + return relation_data.get(field) + + @override + @ensure_leader_for_app + def get_fields(self, *fields: str) -> dict[str, str]: + res = {} + for field in fields: + if (value := self.get_field(field)) is not None: + res[field] = value + return res + + @override + 
@ensure_leader_for_app + def write_field(self, field: str, value: Any) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if not value: + return None + self.relation.data[self.component].update({field: value}) + + @override + @ensure_leader_for_app + def write_fields(self, mapping: dict[str, Any]) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + (self.write_field(field, value) for field, value in mapping.items()) + + @override + @ensure_leader_for_app + def write_secret_field( + self, field: str, value: Any, secret_group: SecretGroup + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = self.get_field(self.secret_field(secret_group, field)) + + secret = self.secrets.get(label=label, uri=secret_uri) + if not secret: + return self.add_secret(field, value, secret_group) + else: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({field: value}) + secret.set_content(full_content) + return secret + + @override + @ensure_leader_for_app + def delete_field(self, field: str) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + try: + relation_data.pop(field) + 
except KeyError: + logger.debug( + f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" + ) + + @override + @ensure_leader_for_app + def delete_fields(self, *fields: str) -> None: + (self.delete_field(field) for field in fields) + + @override + @ensure_leader_for_app + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + relation_data = self.relation.data[self.component] + secret_field = self.secret_field(secret_group, field) + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = relation_data.get(secret_field) + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f"from relation {self.relation.id} and group {secret_group}" + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. 
+ + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. + """ + if not self.relation: + raise ValueError("Cannot register without relation.") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> Any | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f"No secret 
for group {secret_group} in relation {self.relation}") + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + @override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + return secret + + +@final +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = "secret" + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" + return f"{relation.name}.{relation.id}.{secret_group}.secret" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f"{self.SECRET_FIELD_NAME}-{secret_group}" + + 
+class OpsPeerRepository(OpsRepository): + """Implementation of the Ops Repository for peer relations.""" + + SECRET_FIELD_NAME = "internal_secret" + + IGNORES_GROUPS = [ + SecretGroup("user"), + SecretGroup("entity"), + SecretGroup("mtls"), + SecretGroup("tls"), + ] + + uri_to_databag: bool = False + + @property + def scope(self) -> Scope: + """Returns a scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + raise ValueError("Invalid component, neither a Unit nor an Application") + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + members = [relation.name, self._local_app.name, self.scope.value] + + if secret_group != SecretGroup("extra"): + members.append(secret_group) + return f"{'.'.join(members)}" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + if not field: + raise ValueError("Must have a field.") + return f"{field}@{secret_group}" + + +@final +class OpsPeerUnitRepository(OpsPeerRepository): + """Implementation for a unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + super().__init__(model, relation, component) + + +@final +class OpsOtherPeerUnitRepository(OpsPeerRepository): + """Implementation for a remote unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + if component == model.unit: + raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.") + super().__init__(model, relation, component) + + @override + def write_field(self, field: str, value: Any) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def write_fields(self, 
mapping: dict[str, Any]) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def add_secret( + self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None + ) -> CachedSecret | None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_field(self, field: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_fields(self, *fields: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + +TRepository = TypeVar("TRepository", bound=OpsRepository) +TCommon = TypeVar("TCommon", bound=BaseModel) +TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel) +TCommonBis = TypeVar("TCommonBis", bound=BaseModel) + + +class RepositoryInterface(Generic[TRepository, TCommon]): + """Repository builder.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + component: Unit | Application, + repository_type: type[TRepository], + model: type[TCommon] | TypeAdapter | None, + ): + self.charm = charm + self._model = charm.model + self.repository_type = repository_type + self.relation_name = relation_name + self.model = model + self.component = component + + @property + def relations(self) -> list[Relation]: + """The list of Relation instances associated with this relation name.""" + return self._model.relations[self.relation_name] + + def repository( + self, relation_id: int, component: Unit | Application | None = None + ) -> TRepository: + """Returns a repository for the relation.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return self.repository_type(self._model, 
relation, component or self.component) + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommon], + component: Unit | Application | None = None, + ) -> TCommon: ... + + @overload + def build_model( + self, + relation_id: int, + model: TypeAdapter[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: None = None, + component: Unit | Application | None = None, + ) -> TCommon: ... + + def build_model( + self, + relation_id: int, + model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, + component: Unit | Application | None = None, + ) -> TCommon | TCommonBis: + """Builds a model using the repository for that relation.""" + model = model or self.model # First the provided model (allows for specialisation) + component = component or self.component + if not model: + raise ValueError("Missing model to specialise data") + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return build_model(self.repository_type(self._model, relation, component), model) + + def write_model( + self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None + ): + """Writes the model using the repository.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + + write_model( + self.repository_type(self._model, relation, self.component), model, context=context + ) + + +class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TCommon] | TypeAdapter | 
None = None, + ): + super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) + + +class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) + + +class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): + """Specialised Interface to build repositories for this unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) + + +class OpsOtherPeerUnitRepositoryInterface( + RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] +): + """Specialised Interface to build repositories for another unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + unit: Unit, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) + + +############################################################################## +# DDD implementation methods +############################################################################## +############################################################################## + + +def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: + """Builds a common model using the provided repository and provided model structure.""" + data = repository.get_data() or {} + + data.pop("data", None) + + # Beware this means all fields should have a default value here. 
+ if isinstance(model, TypeAdapter): + return model.validate_python(data, context={"repository": repository}) + + return model.model_validate(data, context={"repository": repository}) + + +def write_model( + repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None +): + """Writes the data stored in the model using the repository object.""" + context = context or {} + dumped = model.model_dump( + mode="json", context={"repository": repository} | context, exclude_none=True + ) + for field, value in dumped.items(): + dumped_value = value if isinstance(value, str) else json.dumps(value) + repository.write_field(field, dumped_value) + + +############################################################################## +# Custom Events +############################################################################## + + +class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.request = request + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["request"] = pickle.dumps(self.request) + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.request = pickle.loads(snapshot["request"]) + + +class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource requested event.""" + + pass + + +class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource Entity requested event.""" + + pass + + +class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + pass + + +class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + old_mtls_cert: str | None = None, + ): + super().__init__(handle, relation, app, unit, request) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. 
+ + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + requests: list[TRequirerCommonModel], + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.requests = requests + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. + snapshot["requests"] = [pickle.dumps(request) for request in self.requests] + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.requests = [pickle.loads(request) for request in snapshot["requests"]] + + +class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent[TRequirerCommonModel]) + resource_requested = EventSource(ResourceRequestedEvent[TRequirerCommonModel]) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent[TRequirerCommonModel]) + resource_entity_permissions_changed = EventSource( + ResourceEntityPermissionsChangedEvent[TRequirerCommonModel] + ) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent[TRequirerCommonModel]) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["response"] = pickle.dumps(self.response) + return snapshot + + def restore(self, snapshot: dict): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + + self.response = pickle.loads(snapshot["response"]) + + +class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource has been created.""" + + pass + + +class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource entity has been created.""" + + pass + + +class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read/Write enpoinds are changed.""" + + pass + + +class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read-only enpoinds are changed.""" + + pass + + +class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + resource_created = EventSource(ResourceCreatedEvent[TResourceProviderModel]) + resource_entity_created = EventSource(ResourceEntityCreatedEvent[TResourceProviderModel]) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent[TResourceProviderModel]) + read_only_endpoints_changed = EventSource( + ResourceReadOnlyEndpointsChangedEvent[TResourceProviderModel] + ) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def 
_handle_event( + self, + ): + """Handles the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, "data") + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data(relation, self.component, new_data, short_uuid=request.request_id) + + return _diff + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = "", + mtls_enabled: bool = False, + bulk_event: bool = False, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. 
+ bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. + """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in _diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): + if self.mtls_enabled and "secret-mtls" in _diff.added: + getattr(self.on, "mtls_cert_updated").emit( + event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None + ) + return + # Emit a resource requested event if the setup key (resource name) + # was added to the relation databag, but the entity-type key was not. + if resource_added(_diff) and "entity-type" not in _diff.added: + getattr(self.on, "resource_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (resource name) + # was added to the relation databag, in addition to the entity-type key. 
+        if resource_added(_diff) and "entity-type" in _diff.added:
+            getattr(self.on, "resource_entity_requested").emit(
+                event.relation,
+                app=event.app,
+                unit=event.unit,
+                request=request,
+            )
+            # To avoid unnecessary application restarts do not trigger other events.
+            return
+
+        # Emit a permissions changed event when the resource was requested on an
+        # earlier hook (i.e. NOT added in this diff) and the entity-permissions
+        # key was added or changed in this diff.
+        if (
+            not resource_added(_diff)
+            and "entity-type" not in _diff.added
+            and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed)
+        ):
+            getattr(self.on, "resource_entity_permissions_changed").emit(
+                event.relation, app=event.app, unit=event.unit, request=request
+            )
+            # To avoid unnecessary application restarts do not trigger other events.
+            return
+
+    @override
+    def _handle_event(
+        self,
+        event: RelationChangedEvent,
+        repository: AbstractRepository,
+        request: RequirerCommonModel,
+    ):
+        """Computes and stores the diff for a single request, validates it, then dispatches events."""
+        _diff = self.compute_diff(event.relation, request, repository)
+
+        self._validate_diff(event, _diff)
+        self._dispatch_events(event, _diff, request)
+
+    def _handle_bulk_event(
+        self,
+        event: RelationChangedEvent,
+        repository: AbstractRepository,
+        request_model: RequirerDataContractV1[TRequirerCommonModel],
+    ):
+        """Validates all the diffs, dispatches a single bulk event, and only then stores the diffs.
+
+        This allows the developer to process the diffs and store them themselves.
+        """
+        for request in request_model.requests:
+            # Compute the diff without storing it so we can validate the diffs.
+            _diff = self.compute_diff(event.relation, request, repository, store=False)
+            self._validate_diff(event, _diff)
+
+        getattr(self.on, "bulk_resources_requested").emit(
+            event.relation, app=event.app, unit=event.unit, requests=request_model.requests
+        )
+
+        # Now store all the diffs (compute_diff above ran with store=False).
+ for request in request_model.requests: + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info("MTLS is disabled, exiting early.") + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + logger.info("mtls-cert-updated") + + # V0, just fire the event. 
+ if version == "v0": + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if not short_uuid: + return + for _request in request_model.requests: + if _request.request_id == short_uuid: + request = _request + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "mtls_cert_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + request=request, + mtls_cert=old_mtls_cert, + ) + + @override + def _on_relation_changed_event(self, event: RelationChangedEvent): + if not self.charm.unit.is_leader(): + return + + repository = OpsRelationRepository( + self.model, event.relation, component=event.relation.app + ) + + # Don't do anything until we get some data + if not repository.get_data(): + return + + version = repository.get_field("version") or "v0" + if version == "v0": + request_model = build_model(repository, RequirerDataContractV0) + old_name = request_model.original_field + request_model.request_id = None # For safety, let's ensure that we don't have a model. + self._handle_event(event, repository, request_model) + logger.info( + f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" + ) + self.interface.repository( + event.relation.id, + ).write_field(old_name, request_model.resource) + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if self.bulk_event: + self._handle_bulk_event(event, repository, request_model) + return + for request in request_model.requests: + self._handle_event(event, repository, request) + + def set_response(self, relation_id: int, response: ResourceProviderModel): + r"""Sets a response in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. 
+ If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! + + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. 
+ for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index] = response + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = "", + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + ) + + if requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" + ) + + # Created custom event names for each alias. 
+ if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f"{relation_alias}_resource_created", + ResourceCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_resource_entity_created", + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). + """ + if not model: + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + for request in model.requests: + if request.request_id == request_id: + return request.secret_user is not None or request.secret_entity is not None + return False + + def are_all_resources_created(self, rel_id: int) -> bool: + """Checks that all resources have been created for a relation. + + Args: + rel_id: The relation id to check. 
+ """ + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + return all( + self.is_resource_created(rel_id, request.request_id, model) + for request in model.requests + if request.request_id + ) + + ############################################################################## + # Helpers for aliases + ############################################################################## + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation and relation.data[self.charm.unit].get("alias"): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). + available_aliases = self.relation_aliases[:] + for relation in self.charm.model.relations[self.relation_name]: + alias = relation.data[self.charm.unit].get("alias") + if alias: + logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) + available_aliases.remove(alias) + + # Set the alias in the unit relation databag of the specific relation. 
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({"alias": available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get("alias") + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. 
+ for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version="v1", requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info("Still waiting for data.") + return + + data = repository.get_field("data") + if not data: + logger.info("Missing data to compute diffs") + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f"No request matching the response with response_id {response_id}" + ) + self._handle_event(event, repository, request, response) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + 
response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace("-", "_")) + repository.register_secret(uri, secret_group, response.request_id) + + if "secret-user" in _diff.added and not request.entity_type: + logger.info(f"resource {response.resource} created at {datetime.now()}") + getattr(self.on, "resource_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_created", response) + return + + if "secret-entity" in _diff.added and request.entity_type: + logger.info(f"entity {response.entity_name} created at {datetime.now()}") + getattr(self.on, "resource_entity_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_entity_created", response) + return + + if "endpoints" in _diff.added or "endpoints" in _diff.changed: + logger.info(f"endpoints changed at {datetime.now()}") + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "endpoints_changed", response) + return + + if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: + logger.info(f"read-only-endpoints changed at {datetime.now()}") + getattr(self.on, "read_only_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "read_only_endpoints_changed", response) + return diff --git a/tests/v1/unit/test_data_interfaces.py b/tests/v1/unit/test_data_interfaces.py new file mode 100644 index 00000000..d0d41992 --- /dev/null +++ b/tests/v1/unit/test_data_interfaces.py @@ -0,0 +1,1870 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+import json +import logging +import re +import unittest +from abc import ABC, abstractmethod +from logging import getLogger +from typing import Annotated, Dict, Tuple, Type +from unittest.mock import Mock, patch + +import pytest +from ops.charm import CharmBase +from ops.model import Relation, Unit +from ops.testing import Harness +from parameterized import parameterized +from pydantic import Field, SecretStr, TypeAdapter, ValidationError + +from charms.data_platform_libs.v0.data_interfaces import ( + PROV_SECRET_PREFIX, +) +from charms.data_platform_libs.v1.data_interfaces import ( + Diff, + EntityPermissionModel, + ExtraSecretStr, + KafkaRequestModel, + OpsOtherPeerUnitRepository, + OpsPeerRepositoryInterface, + OpsPeerUnitRepositoryInterface, + OpsRepository, + OptionalSecretStr, + PeerModel, + RepositoryInterface, + RequirerCommonModel, + ResourceCreatedEvent, + ResourceEndpointsChangedEvent, + ResourceProviderEventHandler, + ResourceProviderModel, + ResourceReadOnlyEndpointsChangedEvent, + ResourceRequestedEvent, + ResourceRequirerEventHandler, + ResourceRequiresEvents, + SecretGroup, + TCommon, + TRepository, +) +from charms.harness_extensions.v0.capture_events import capture, capture_events + +logger = getLogger(__name__) + +ENTITY_GROUP = "GROUP" +ENTITY_USER = "USER" + +PEER_RELATION_NAME = "database-peers" + +DATABASE = "data_platform" +ENTITY_PERMISSIONS = [ + {"resource_name": "cars", "resource_type": "TABLE", "privileges": ["SELECT"]} +] +EXTRA_USER_ROLES: str = "CREATEDB,CREATEROLE" +EXTRA_GROUP_ROLES: str = "CUSTOM_ROLE_1,CUSTOM_ROLE_2" +DATABASE_RELATION_INTERFACE = "database_client" +DATABASE_RELATION_NAME = "database" +DATABASE_METADATA = f""" +name: database + +peers: + database-peers: + interface: database-peers + +provides: + {DATABASE_RELATION_NAME}: + interface: {DATABASE_RELATION_INTERFACE} +""" + + +CLUSTER_ALIASES = ["cluster1", "cluster2"] +METADATA = f""" +name: application +requires: + {DATABASE_RELATION_NAME}: + interface: 
{DATABASE_RELATION_INTERFACE} + limit: {len(CLUSTER_ALIASES)} +""" + +# +# Helper functions +# + + +def verify_relation_interface_functions( + interface: RepositoryInterface[TRepository, TCommon], relation_id: int +): + """This function is used to verify that the 3 main interface functions work correctly.""" + repository = interface.repository(relation_id) + for field in ["something", "secret-field"]: + # Interface function: write_field + repository.write_field(field=field, value="else") + + # Interface function: get_field + assert repository.get_field(field) == "else" + + # Interface function: get_fields + assert repository.get_fields(field) == {field: "else"} + + # Interface function: delete_field + repository.delete_field(field) + + assert repository.get_field(field) is None + rel_data = repository.get_fields(field) + assert rel_data == {} + + +# +# Test CharmsOptionalSecretStr +# + +MygroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] + + +class PeerAppModel(PeerModel): + secret_field_app: ExtraSecretStr = Field(alias="secret-field-app") + secret_field: ExtraSecretStr = Field(alias="secret-field") + mysecret1: MygroupSecretStr + mysecret2: MygroupSecretStr + + +class PeerUnitModel(PeerModel): + secret_field_unit: ExtraSecretStr = Field(alias="secret-field-unit") + secret_field: ExtraSecretStr = Field(alias="secret-field") + mysecret1: MygroupSecretStr + mysecret2: MygroupSecretStr + + +class DatabaseCharm(CharmBase): + """Mock database charm to use in units tests.""" + + def __init__(self, *args): + super().__init__(*args) + self.peer_relation_app = OpsPeerRepositoryInterface( + self, PEER_RELATION_NAME, model=PeerAppModel + ) + self.peer_relation_unit = OpsPeerUnitRepositoryInterface( + self, PEER_RELATION_NAME, model=PeerUnitModel + ) + self.provider = ResourceProviderEventHandler( + self, DATABASE_RELATION_NAME, RequirerCommonModel + ) + self._servers_data = {} + self.framework.observe( + 
self.provider.on.resource_requested, + self._on_resource_requested, + ) + self.framework.observe( + self.provider.on.resource_entity_requested, + self._on_resource_entity_requested, + ) + self.framework.observe( + self.provider.on.resource_entity_permissions_changed, + self._on_resource_entity_permissions_changed, + ) + + @property + def peer_relation(self) -> Relation | None: + """The cluster peer relation.""" + return self.model.get_relation(PEER_RELATION_NAME) + + @property + def peer_units_data_interfaces(self) -> Dict[Unit, OpsOtherPeerUnitRepository]: + """The cluster peer relation.""" + if not self.peer_relation or not self.peer_relation.units: + return {} + + for unit in self.peer_relation.units: + if unit not in self._servers_data: + self._servers_data[unit] = OpsOtherPeerUnitRepository( + self.model, relation=self.peer_relation, component=unit + ) + return self._servers_data + + def _on_resource_requested(self, _) -> None: + pass + + def _on_resource_entity_requested(self, _) -> None: + pass + + def _on_resource_entity_permissions_changed(self, _) -> None: + pass + + +class DatabaseCharmDynamicSecrets(CharmBase): + """Mock database charm to use in units tests.""" + + def __init__(self, *args): + super().__init__(*args) + self.peer_relation_app = OpsPeerRepositoryInterface( + self, PEER_RELATION_NAME, model=PeerAppModel + ) + self.peer_relation_unit = OpsPeerUnitRepositoryInterface( + self, PEER_RELATION_NAME, model=PeerUnitModel + ) + + @property + def peer_relation(self) -> Relation | None: + """The cluster peer relation.""" + return self.model.get_relation(PEER_RELATION_NAME) + + +# +# Tests +# + + +class DataProvidesBaseTests(ABC): + SECRET_FIELDS = [ + "username", + "password", + "tls", + "tls-ca", + "uris", + "read-only-uris", + "entity-name", + "entity-password", + ] + + DATABASE_FIELD = "resource" + + app_name: str + relation_name: str + + @pytest.fixture + def use_caplog(self, caplog): + self._caplog = caplog + + @abstractmethod + def 
get_harness(self) -> Tuple[Harness, int]: + pass + + def setUp(self): + self.harness, self.rel_id = self.get_harness() + + def tearDown(self) -> None: + self.harness.cleanup() + + def test_diff(self): + """Asserts that the charm library correctly returns a diff of the relation data.""" + # Define a mock relation changed event to be used in the subsequent diff calls. + relation = self.harness.model.get_relation(self.relation_name, self.rel_id) + # Use a variable to easily update the relation changed event data during the test. + + data_model = ResourceProviderModel.model_validate( + { + "resource": "blah", + "request-id": "", + "username": "test-username", + "password": "test-password", + } + ) + # Test with new data added to the relation databag. + result = self.harness.charm.provider.compute_diff(relation, request=data_model, store=True) + assert result == Diff({"request-id", "salt", "resource", "secret-user"}, set(), set()) + + # Test with the same data. + result = self.harness.charm.provider.compute_diff(relation, request=data_model, store=True) + assert result == Diff(set(), set(), set()) + + # Test with changed data. + data_model.resource = "bluh" + result = self.harness.charm.provider.compute_diff(relation, request=data_model, store=True) + assert result == Diff(set(), {"resource"}, set()) + + # Test with deleted data. 
+ del data_model.secret_user + result = self.harness.charm.provider.compute_diff(relation, request=data_model, store=True) + assert result == Diff(set(), set(), {"secret-user"}) + + def test_relation_interface(self): + """Check the functionality of each public interface function.""" + # We pretend that the connection is initialized + self.harness.update_relation_data( + self.rel_id, "application", {self.DATABASE_FIELD: DATABASE} + ) + + interface = self.harness.charm.provider.interface + verify_relation_interface_functions(interface, self.rel_id) + + def test_set_credentials_secrets(self): + """Asserts that credentials are set up as secrets if possible.""" + # Set some data in the relation. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + self.DATABASE_FIELD: DATABASE, + "request-id": "c759221a6c14c72a", + "salt": "kkkkkkkk", + } + ] + ), + }, + ) + response = ResourceProviderModel( + salt="kkkkkkkk", + request_id="c759221a6c14c72a", + resource=DATABASE, + username=SecretStr("test-username"), + password=SecretStr("test-password"), + ) + + # Set the credentials in the relation using the provides charm library. + self.harness.charm.provider.set_response(self.rel_id, response) + + # Check that the credentials are present in the relation. 
+ relation = self.harness.get_relation_data(self.rel_id, self.app_name) + assert json.loads(relation["data"]) == ( + { + "c759221a6c14c72a": { + self.DATABASE_FIELD: DATABASE, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + } + ) + + requests = json.loads(relation["requests"]) + secret_id = requests[0][f"{PROV_SECRET_PREFIX}user"] + secret = self.harness.charm.model.get_secret(id=secret_id) + + assert secret.get_content(refresh=True) == { + "username": "test-username", + "password": "test-password", + } + + @pytest.mark.usefixtures("only_with_juju_secrets") + def test_set_entity_credentials(self): + """Asserts that the database name is in the relation databag when it's requested.""" + # Set some data in the relation. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + self.DATABASE_FIELD: DATABASE, + "request-id": "c759221a6c14c72a", + "salt": "kkkkkkkk", + "entity-type": "USER", + } + ] + ), + }, + ) + + # Set the entity credentials in the relation using the provides charm library. + response = ResourceProviderModel( + salt="kkkkkkkk", + request_id="c759221a6c14c72a", + resource=DATABASE, + entity_name=SecretStr("test-name"), # pyright: ignore[reportCallIssue] + entity_password=SecretStr("test-password"), # pyright: ignore[reportCallIssue] + ) + + # Set the credentials in the relation using the provides charm library. + self.harness.charm.provider.set_response(self.rel_id, response) + + relation = self.harness.get_relation_data(self.rel_id, self.app_name) + + # Check that the entity credentials are present in the relation. 
+ assert relation["data"] == json.dumps( + { + "c759221a6c14c72a": { + self.DATABASE_FIELD: DATABASE, + "request-id": "c759221a6c14c72a", + "salt": "kkkkkkkk", + "entity-type": "USER", + } + } + ) + + requests = json.loads(relation["requests"]) + secret_id = requests[0][f"{PROV_SECRET_PREFIX}entity"] + secret = self.harness.charm.model.get_secret(id=secret_id) + + assert secret.get_content(refresh=True) == { + "entity-name": "test-name", + "entity-password": "test-password", + } + + +class TestDatabaseProvides(DataProvidesBaseTests, unittest.TestCase): + metadata = DATABASE_METADATA + relation_name = DATABASE_RELATION_NAME + app_name = "database" + charm = DatabaseCharm + + def get_harness(self) -> Tuple[Harness, int]: + harness = Harness(self.charm, meta=self.metadata) + # Set up the initial relation and hooks. + rel_id = harness.add_relation(self.relation_name, "application") + peer_rel_id = harness.add_relation(PEER_RELATION_NAME, self.app_name) + harness.add_relation_unit(peer_rel_id, f"{self.app_name}/1") + harness.add_relation_unit(peer_rel_id, f"{self.app_name}/2") + + harness.add_relation_unit(rel_id, "application/0") + harness.set_leader(True) + harness.begin_with_initial_hooks() + return harness, rel_id + + # + # Peer Data tests + # + def test_other_peer_relation_disabled_functions(self): + """Verify that fetch_relation_data/field() functions are disabled for Peer Relations.""" + for _, repository in self.harness.charm.peer_units_data_interfaces.items(): + with pytest.raises(NotImplementedError): + repository.write_field("key", "value") + + with pytest.raises(NotImplementedError): + repository.delete_field("key") + + @parameterized.expand([("peer_relation_app",), ("peer_relation_unit",)]) + def test_peer_relation_interface(self, interface_attr): + """Check the functionality of each public interface function.""" + interface = getattr(self.harness.charm, interface_attr) + verify_relation_interface_functions(interface, self.harness.charm.peer_relation.id) + 
+ @parameterized.expand([("peer_relation_app",), ("peer_relation_unit",)]) + def test_peer_relation_interface_secret_fields(self, interface_attr): + """Check the functionality of each public interface function.""" + relation_id: int = self.harness.charm.peer_relation.id + interface: RepositoryInterface = getattr(self.harness.charm, interface_attr) + + model = interface.build_model(relation_id) + + model.secret_field = "bla" + model.mysecret1 = "bla" + + interface.write_model(relation_id, model) + + repository = interface.repository(relation_id) + assert repository.get_secret_field("secret-field", "extra") == "bla" + assert repository.get_secret_field("mysecret1", "mygroup") == "bla" + + @parameterized.expand([("peer_relation_app",), ("peer_relation_unit",)]) + @pytest.mark.usefixtures("only_with_juju_secrets") + def test_peer_relation_secret_secret_revision(self, interface_attr): + """Check the functionality of each public interface function.""" + # Given + relation_id: int = self.harness.charm.peer_relation.id + interface: RepositoryInterface = getattr(self.harness.charm, interface_attr) + repository = interface.repository(relation_id) + + scope = interface_attr.split("_")[2] + scope_opj = getattr(self.harness.charm, scope) + secret = scope_opj.add_secret( + {"secret-field": "initialvalue"}, label=f"{PEER_RELATION_NAME}.database.{scope}" + ) + cached_secret = repository.secrets.get(label=f"{PEER_RELATION_NAME}.database.{scope}") + + initial_secret_revision = secret.get_info().revision + initial_cached_secret_revision = cached_secret.meta.get_info().revision + + # When + repository.write_secret_field("secret-field", "initialvalue", "extra") + secret.get_content(refresh=True) + + unchanged_secret_revision = secret.get_info().revision + unchanged_cached_secret_revision = cached_secret.meta.get_info().revision + + repository.write_secret_field("secret-field", "newvalue", "extra") + secret.get_content(refresh=True) + + changed_secret_revision = 
secret.get_info().revision + changed_cached_secret_revision = cached_secret.meta.get_info().revision + + # Then + assert ( + initial_secret_revision + == initial_cached_secret_revision + == unchanged_secret_revision + == unchanged_cached_secret_revision + ) + assert changed_secret_revision == unchanged_secret_revision + 1 + assert changed_cached_secret_revision == unchanged_cached_secret_revision + 1 + + def test_peer_relation_other_unit(self): + """Check the functionality of each public interface function on each "other" unit.""" + relation_id = self.harness.charm.peer_relation.id + for unit, interface in self.harness.charm.peer_units_data_interfaces.items(): + interface: OpsOtherPeerUnitRepository + self.harness.update_relation_data(relation_id, unit.name, {"something": "else"}) + + # fetch_relation_field() + assert interface.get_field("something") == "else" + + # fetch_relation_data() + rel_data = interface.get_fields("something") + assert rel_data["something"] == "else" + + assert interface.get_field("non-existent-field") is None + rel_data = interface.get_fields("non-existent-field") + assert rel_data == {} + + def test_peer_relation_other_unit_dict(self): + """Check the functionality of each public interface function on each "other" unit.""" + relation_id = self.harness.charm.peer_relation.id + for unit, interface in self.harness.charm.peer_units_data_interfaces.items(): + interface: OpsOtherPeerUnitRepository + self.harness.update_relation_data(relation_id, unit.name, {"something": "else"}) + + # fetch_relation_field() + assert interface.get_field("something") == "else" + + # fetch_relation_data() + rel_data = interface.get_data() + assert rel_data + assert rel_data["something"] == "else" + + with pytest.raises(KeyError): + assert rel_data["non-existent-field"] + assert rel_data.get("non-existent-field") is None + + # + # Relation Data tests + # + @patch.object(DatabaseCharm, "_on_resource_requested") + def test_on_resource_requested_v0(self, 
_on_resource_requested): + """Asserts that the correct hook is called when a new database is requested.""" + # Simulate the request of a new database plus extra user roles. + self.harness.update_relation_data( + self.rel_id, + "application", + { + self.DATABASE_FIELD: DATABASE, + "extra-user-roles": EXTRA_USER_ROLES, + "external-node-connectivity": "true", + }, + ) + + # Assert the correct hook is called. + _on_resource_requested.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. + event = _on_resource_requested.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.extra_user_roles == EXTRA_USER_ROLES + assert event.request.external_node_connectivity is True + + # Reset the mock call count. + _on_resource_requested.reset_mock() + + # Simulate the request of a new database entity. + self.harness.update_relation_data( + self.rel_id, + "application", + {self.DATABASE_FIELD: DATABASE, "entity-type": ENTITY_USER}, + ) + + # Assert the correct hook is called. + _on_resource_requested.assert_not_called() + + @patch.object(DatabaseCharm, "_on_resource_requested") + def test_on_resource_requested_v1(self, _on_resource_requested): + """Asserts that the correct hook is called when a new database is requested.""" + # Simulate the request of a new database plus extra user roles. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + self.DATABASE_FIELD: DATABASE, + "extra-user-roles": EXTRA_USER_ROLES, + "external-node-connectivity": True, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + ] + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_requested.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. 
+ event = _on_resource_requested.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.extra_user_roles == EXTRA_USER_ROLES + assert event.request.external_node_connectivity is True + + # Reset the mock call count. + _on_resource_requested.reset_mock() + + # Simulate the request of a new database entity. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_USER, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + ] + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_requested.assert_not_called() + + @patch.object(DatabaseCharm, "_on_resource_entity_requested") + def test_on_resource_entity_requested_v0(self, _on_resource_entity_requested): + """Asserts that the correct hook is called when a new database entity is requested.""" + # Simulate the request of a new user plus extra roles. + self.harness.update_relation_data( + self.rel_id, + "application", + { + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_USER, + "entity-permissions": json.dumps(ENTITY_PERMISSIONS), + "extra-user-roles": json.dumps(EXTRA_USER_ROLES), + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_requested.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. + event = _on_resource_entity_requested.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.entity_type == ENTITY_USER + assert [ + item.model_dump() for item in event.request.entity_permissions + ] == ENTITY_PERMISSIONS + assert event.request.extra_user_roles == EXTRA_USER_ROLES + + # Reset the relation data keys + mock count + self.harness.update_relation_data(self.rel_id, self.app_name, {"data": "{}"}) + _on_resource_entity_requested.reset_mock() + + # Simulate the request of a new group plus extra roles. 
+ self.harness.update_relation_data( + self.rel_id, + "application", + { + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_GROUP, + "entity-permissions": json.dumps(ENTITY_PERMISSIONS), + "extra-user-roles": "", + "extra-group-roles": EXTRA_GROUP_ROLES, + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_requested.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. + event = _on_resource_entity_requested.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.entity_type == ENTITY_GROUP + assert [ + item.model_dump() for item in event.request.entity_permissions + ] == ENTITY_PERMISSIONS + assert event.request.extra_group_roles == EXTRA_GROUP_ROLES + + @patch.object(DatabaseCharm, "_on_resource_entity_requested") + def test_on_resource_entity_requested_v1(self, _on_resource_entity_requested): + """Asserts that the correct hook is called when a new database entity is requested.""" + # Simulate the request of a new user plus extra roles. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_USER, + "entity-permissions": ENTITY_PERMISSIONS, + "extra-user-roles": EXTRA_USER_ROLES, + } + ] + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_requested.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. 
+ event = _on_resource_entity_requested.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.entity_type == ENTITY_USER + assert [ + item.model_dump() for item in event.request.entity_permissions + ] == ENTITY_PERMISSIONS + assert event.request.extra_user_roles == EXTRA_USER_ROLES + + # Reset the relation data keys + mock count + self.harness.update_relation_data(self.rel_id, self.app_name, {"data": "{}"}) + _on_resource_entity_requested.reset_mock() + + # Simulate the request of a new group plus extra roles. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_GROUP, + "entity-permissions": ENTITY_PERMISSIONS, + "extra-user-roles": "", + "extra-group-roles": EXTRA_GROUP_ROLES, + } + ] + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_requested.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. + event = _on_resource_entity_requested.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.entity_type == ENTITY_GROUP + assert [ + item.model_dump() for item in event.request.entity_permissions + ] == ENTITY_PERMISSIONS + assert event.request.extra_group_roles == EXTRA_GROUP_ROLES + + @patch.object(DatabaseCharm, "_on_resource_entity_permissions_changed") + def test_on_entity_permissions_changed_v0(self, _on_resource_entity_permissions_changed): + """Asserts that the correct hook is called when entity permissions are changed.""" + # Simulate the request of a new user plus extra roles. 
+ self.harness.update_relation_data( + self.rel_id, + "application", + { + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_USER, + "entity-permissions": "", + "extra-user-roles": EXTRA_USER_ROLES, + }, + ) + + # Simulate the request to update user permissions. + self.harness.update_relation_data( + self.rel_id, + "application", + { + self.DATABASE_FIELD: DATABASE, + "entity-type": ENTITY_USER, + "entity-permissions": json.dumps(ENTITY_PERMISSIONS), + "extra-user-roles": EXTRA_USER_ROLES, + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_permissions_changed.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. + event = _on_resource_entity_permissions_changed.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.entity_type == ENTITY_USER + assert event.request.entity_permissions == TypeAdapter( + list[EntityPermissionModel] + ).validate_python(ENTITY_PERMISSIONS) + assert event.request.extra_user_roles == EXTRA_USER_ROLES + + @patch.object(DatabaseCharm, "_on_resource_entity_permissions_changed") + def test_on_entity_permissions_changed_v1(self, _on_resource_entity_permissions_changed): + """Asserts that the correct hook is called when entity permissions are changed.""" + # Simulate the request of a new user plus extra roles. + self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + self.DATABASE_FIELD: DATABASE, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "entity-type": ENTITY_USER, + "extra-user-roles": EXTRA_USER_ROLES, + } + ] + ), + }, + ) + + # Simulate the request to update user permissions. 
+ self.harness.update_relation_data( + self.rel_id, + "application", + { + "version": "v1", + "requests": json.dumps( + [ + { + self.DATABASE_FIELD: DATABASE, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "entity-type": ENTITY_USER, + "entity-permissions": ENTITY_PERMISSIONS, + "extra-user-roles": EXTRA_USER_ROLES, + } + ] + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_permissions_changed.assert_called_once() + + # Assert the database name and the entity info are accessible in the providers charm library event. + event = _on_resource_entity_permissions_changed.call_args[0][0] + assert event.request.resource == DATABASE + assert event.request.entity_type == ENTITY_USER + assert event.request.entity_permissions == TypeAdapter( + list[EntityPermissionModel] + ).validate_python(ENTITY_PERMISSIONS) + assert event.request.extra_user_roles == EXTRA_USER_ROLES + + def test_database_requested_event(self): + # Test custom event creation + + # Test the event being emitted by the application. + with capture(self.harness.charm, ResourceRequestedEvent) as captured: + self.harness.update_relation_data(self.rel_id, "application", {"database": DATABASE}) + assert captured.event.app.name == "application" + + # Reset the diff data to trigger the event again later. + self.harness.update_relation_data(self.rel_id, "database", {"data": "{}"}) + + # Test the event being emitted by the unit. + with capture(self.harness.charm, ResourceRequestedEvent) as captured: + self.harness.update_relation_data(self.rel_id, "application/0", {"database": DATABASE}) + assert captured.event.unit.name == "application/0" + + +class TestDatabaseProvidesDynamicSecrets(ABC, unittest.TestCase): + metadata = DATABASE_METADATA + relation_name = DATABASE_RELATION_NAME + app_name = "database" + charm = DatabaseCharmDynamicSecrets + + def get_harness(self) -> Harness: + harness = Harness(self.charm, meta=self.metadata) + # Set up the initial relation and hooks. 
+ peer_rel_id = harness.add_relation(PEER_RELATION_NAME, self.app_name) + harness.add_relation_unit(peer_rel_id, f"{self.app_name}/1") + harness.add_relation_unit(peer_rel_id, f"{self.app_name}/2") + harness.set_leader(True) + harness.begin_with_initial_hooks() + return harness + + def setUp(self): + self.harness = self.get_harness() + + def tearDown(self) -> None: + self.harness.cleanup() + + # + # Peer Data tests + # + @parameterized.expand([("app",), ("unit",)]) + def test_peer_relation_interface(self, scope): + """Check the functionality of each public interface function.""" + interface: OpsPeerRepositoryInterface = getattr( + self.harness.charm, f"peer_relation_{scope}" + ) + relation_id = self.harness.charm.peer_relation.id + + repository = interface.repository(relation_id) + + # set_secret() + repository.write_secret_field("something", "else", SecretGroup("extra")) + + secret = self.harness.charm.model.get_secret( + label=f"{PEER_RELATION_NAME}.database.{scope}" + ) + assert "something" in secret.get_content() + + # get_secret() + assert repository.get_secret_field("something", SecretGroup("extra")) == "else" + + # delete_secret() + repository.delete_secret_field("something", SecretGroup("extra")) + + assert repository.get_secret_field("something", SecretGroup("extra")) is None + + +class ApplicationCharmDatabase(CharmBase): + """Mock application charm to use in units tests.""" + + def __init__(self, *args): + super().__init__(*args) + self.requirer = ResourceRequirerEventHandler( + self, + relation_name=DATABASE_RELATION_NAME, + requests=[ + RequirerCommonModel( + resource=DATABASE, extra_user_roles=EXTRA_USER_ROLES, salt="kkkkkkkk" + ), + RequirerCommonModel(resource="", entity_type="USER", salt="xxxxxxxx"), + ], + relation_aliases=CLUSTER_ALIASES, + response_model=ResourceProviderModel, + ) + self.framework.observe( + self.requirer.on.resource_created, + self._on_resource_created, + ) + self.framework.observe( + self.requirer.on.resource_entity_created, + 
self._on_resource_entity_created, + ) + self.framework.observe( + self.on[DATABASE_RELATION_NAME].relation_broken, self._on_relation_broken + ) + self.framework.observe(self.requirer.on.endpoints_changed, self._on_endpoints_changed) + self.framework.observe( + self.requirer.on.read_only_endpoints_changed, + self._on_read_only_endpoints_changed, + ) + self.framework.observe( + self.requirer.on.cluster1_resource_created, + self._on_cluster1_resource_created, + ) + + def log_relation_size(self, prefix=""): + logger.info(f"§{prefix} relations: {len(self.requirer.interface.relations)}") + + @staticmethod + def get_relation_size(log_message: str) -> int: + num_of_relations = ( + re.search(r"relations: [0-9]*", log_message) + .group(0) + .replace("relations: ", "") + .strip() + ) + + return int(num_of_relations) + + @staticmethod + def get_prefix(log_message: str) -> str: + return ( + re.search(r"§.* relations:", log_message) + .group(0) + .replace("relations:", "") + .replace("§", "") + .strip() + ) + + def _on_resource_created(self, _) -> None: + self.log_relation_size("on_resource_created") + + def _on_resource_entity_created(self, _) -> None: + self.log_relation_size("on_resource_entity_created") + + def _on_relation_broken(self, event) -> None: + # This should not raise errors + self.requirer.interface.repository(event.relation.id).get_data() + + self.log_relation_size("on_relation_broken") + + def _on_endpoints_changed(self, _) -> None: + self.log_relation_size("on_endpoints_changed") + + def _on_read_only_endpoints_changed(self, _) -> None: + self.log_relation_size("on_read_only_endpoints_changed") + + def _on_cluster1_resource_created(self, _) -> None: + self.log_relation_size("on_cluster1_resource_created") + + +@pytest.fixture(autouse=True) +def reset_aliases(): + """Fixture that runs before each test to delete the custom events created for the aliases. 
+ + This is needed because the events are created again in the next test, + which causes an error related to duplicated events. + """ + for cluster_alias in CLUSTER_ALIASES: + try: + delattr(ResourceRequiresEvents, f"{cluster_alias}_resource_created") + delattr(ResourceRequiresEvents, f"{cluster_alias}_resource_entity_created") + delattr(ResourceRequiresEvents, f"{cluster_alias}_endpoints_changed") + delattr(ResourceRequiresEvents, f"{cluster_alias}_read_only_endpoints_changed") + except AttributeError: + # Ignore the events not existing before the first test. + pass + + +class DataRequirerBaseTests(ABC): + metadata: str + relation_name: str + app_name: str + charm: Type[CharmBase] + + rel_id: int + + @pytest.fixture + def use_caplog(self, caplog): + self._caplog = caplog + + def get_harness(self) -> Harness: + harness = Harness(self.charm, meta=self.metadata) + harness.set_leader(True) + return harness + + def add_relation(self, harness: Harness, app_name: str) -> int: + rel_id = harness.add_relation(self.relation_name, app_name) + harness.add_relation_unit(rel_id, f"{app_name}/0") + return rel_id + + def setUp(self): + self.harness = self.get_harness() + self.harness.begin_with_initial_hooks() + + def tearDown(self) -> None: + self.harness.cleanup() + + def test_diff(self): + """Asserts that the charm library correctly returns a diff of the relation data.""" + # Define a mock relation changed event to be used in the subsequent diff calls. + application = "data-platform" + + rel_id = self.add_relation(self.harness, application) + relation = self.harness.model.get_relation(self.relation_name, rel_id) + + data_model = ResourceProviderModel.model_validate( + { + "resource": "blah", + "request-id": "", + "username": "test-username", + "password": "test-password", + } + ) + + # Test with new data added to the relation databag. 
+ result = self.harness.charm.requirer.compute_diff(relation, request=data_model, store=True) + assert result == Diff({"request-id", "salt", "resource", "secret-user"}, set(), set()) + + # Test with the same data. + result = self.harness.charm.requirer.compute_diff(relation, request=data_model, store=True) + assert result == Diff(set(), set(), set()) + + # Test with changed data. + data_model.resource = "bluh" + result = self.harness.charm.requirer.compute_diff(relation, request=data_model, store=True) + assert result == Diff(set(), {"resource"}, set()) + + # Test with deleted data. + del data_model.secret_user + result = self.harness.charm.requirer.compute_diff(relation, request=data_model, store=True) + assert result == Diff(set(), set(), {"secret-user"}) + + def test_relation_interface(self): + """Check the functionality of each public interface function.""" + interface = self.harness.charm.requirer.interface + verify_relation_interface_functions(interface, self.rel_id) + + def test_relation_interface_consistency(self): + """Check the consistency of the public interface init function.""" + with pytest.raises(ValueError): + RequirerCommonModel(entity_type="INVALID_ROLE_TYPE") + with pytest.raises(ValueError): + RequirerCommonModel(entity_type="USER", extra_group_roles=EXTRA_GROUP_ROLES) + with pytest.raises(ValueError): + RequirerCommonModel(entity_type="GROUP", extra_user_roles=EXTRA_USER_ROLES) + with pytest.raises(ValidationError): + KafkaRequestModel(consumer_group_prefix="*") + + +class TestDatabaseRequiresNoRelations(DataRequirerBaseTests, unittest.TestCase): + metadata = METADATA + relation_name = DATABASE_RELATION_NAME + charm = ApplicationCharmDatabase + + app_name = "application" + provider = "database" + + def setUp(self): + self.harness = self.get_harness() + self.harness.begin_with_initial_hooks() + + def test_relation_interface(self): + """Disabling irrelevant inherited test.""" + pass + + def test_relation_interface_dict(self): + """Disabling 
irrelevant inherited test.""" + pass + + def test_hide_relation_on_broken_event(self): + secret = self.harness.charm.app.add_secret( + {"username": "test-username", "password": "test-password"} + ) + with self.assertLogs(logger, "INFO") as logs: + rel_id = self.add_relation(self.harness, self.provider) + self.harness.update_relation_data( + rel_id, + self.provider, + { + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "secret-user": secret.id, + } + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + # make sure two events were fired + self.assertEqual(len(logs.output), 2) + self.assertListEqual( + [self.harness.charm.get_prefix(log) for log in logs.output], + ["on_resource_created", "on_cluster1_resource_created"], + ) + self.assertEqual(self.harness.charm.get_relation_size(logs.output[0]), 1) + + with self.assertLogs(logger, "INFO") as logs: + self.harness.remove_relation(rel_id) + + # Within the relation broken event the requirer should not show any relation + self.assertEqual(self.harness.charm.get_relation_size(logs.output[0]), 0) + self.assertEqual(self.harness.charm.get_prefix(logs.output[0]), "on_relation_broken") + + +class TestDatabaseRequires(DataRequirerBaseTests, unittest.TestCase): + metadata = METADATA + relation_name = DATABASE_RELATION_NAME + charm = ApplicationCharmDatabase + + app_name = "application" + provider = "database" + + DATABASE_FIELD = "database" + + def setUp(self): + self.harness = self.get_harness() + self.rel_id = self.add_relation(self.harness, self.provider) + self.harness.begin_with_initial_hooks() + + def test_requires_interface_functions_secrets(self): + """Check the functionality of each public interface function.""" + interface = self.harness.charm.requirer.interface + verify_relation_interface_functions(interface, self.rel_id) + relation = 
self.harness.model.get_relation(DATABASE_RELATION_NAME, self.rel_id) + assert relation + + # Get remote data + rel_data = interface.repository(self.rel_id, relation.app).get_data() + assert rel_data == {} + + # Get my data + rel_data = interface.repository(self.rel_id).get_data() + assert rel_data == { + "alias": "cluster1", + "version": "v1", + "requests": [ + { + "resource": "data_platform", + "request-id": "c759221a6c14c72a", + "salt": "kkkkkkkk", + "extra-user-roles": "CREATEDB,CREATEROLE", + "external-node-connectivity": False, + }, + { + "resource": "", + "request-id": "9ecfabfbb5258f88", + "salt": "xxxxxxxx", + "external-node-connectivity": False, + "entity-type": "USER", + }, + ], + } + + @patch.object(charm, "_on_resource_created") + def test_on_resource_created_secrets(self, _on_resource_created): + """Asserts on_resource_created is called when the credentials are set in the relation.""" + # Simulate sharing the credentials of a new created database. + secret = self.harness.charm.app.add_secret( + {"username": "test-username", "password": "test-password"} + ) + + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "version": "v1", + "requests": json.dumps( + [ + { + f"{PROV_SECRET_PREFIX}user": secret.id, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_created.assert_called_once() + + # Check that the username and the password are present in the relation + # using the requires charm library event. 
+ event = _on_resource_created.call_args[0][0] + assert event.response.secret_user == secret.id + assert event.response.username == "test-username" + assert event.response.password == "test-password" + + assert self.harness.charm.requirer.is_resource_created( + self.rel_id, event.response.request_id + ) + assert self.harness.charm.requirer.are_all_resources_created(self.rel_id) + + rel_id = self.add_relation(self.harness, self.provider) + + secret2 = self.harness.charm.app.add_secret( + {"username": "test-username-2", "password": "test-password-2"} + ) + self.harness.update_relation_data( + rel_id, + self.provider, + { + "version": "v1", + "requests": json.dumps( + [ + { + f"{PROV_SECRET_PREFIX}user": secret2.id, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + # Assert the correct hook is called. + assert _on_resource_created.call_count == 2 + + # Check that the username and the password are present in the relation + # using the requires charm library event. 
+ event = _on_resource_created.call_args[0][0] + assert event.response.secret_user == secret2.id + assert event.response.username == "test-username-2" + assert event.response.password == "test-password-2" + + assert self.harness.charm.requirer.is_resource_created(rel_id, event.response.request_id) + assert self.harness.charm.requirer.are_all_resources_created(rel_id) + + @patch.object(charm, "_on_resource_entity_created") + @pytest.mark.usefixtures("only_with_juju_secrets") + def test_on_resource_entity_created_secrets(self, _on_resource_entity_created): + """Asserts on_resource_entity_created is called when the credentials are set in the relation.""" + secret = self.harness.charm.app.add_secret( + {"entity-name": "test-username", "entity-password": "test-password"} + ) + + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "data": json.dumps( + { + "9ecfabfbb5258f88": { + "salt": "xxxxxxxx", + "request-id": "9ecfabfbb5258f88", + "entity-type": ENTITY_USER, + } + } + ), + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "xxxxxxxx", + "request-id": "9ecfabfbb5258f88", + "secret-entity": secret.id, + } + ] + ), + }, + ) + + # Assert the correct hook is called. + _on_resource_entity_created.assert_called_once() + + # Check that the entity-type, entity-name and entity-password are present in the relation. + event = _on_resource_entity_created.call_args[0][0] + assert event.response.secret_entity == secret.id + assert event.response.entity_name == "test-username" + assert event.response.entity_password == "test-password" + + # Reset the mock call count. 
+ _on_resource_entity_created.reset_mock() + + rel_id = self.add_relation(self.harness, self.provider) + + secret2 = self.harness.charm.app.add_secret({"entity-name": "test-groupname"}) + + self.harness.update_relation_data( + rel_id, + self.provider, + { + "data": json.dumps( + { + "9ecfabfbb5258f88": { + "salt": "xxxxxxxx", + "request-id": "9ecfabfbb5258f88", + "entity-type": ENTITY_GROUP, + } + } + ), + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "xxxxxxxx", + "request-id": "9ecfabfbb5258f88", + "secret-entity": secret2.id, + } + ] + ), + }, + ) + # Assert the correct hook is called. + _on_resource_entity_created.assert_called_once() + + # Check that the entity-type and entity-name are present in the relation. + event = _on_resource_entity_created.call_args[0][0] + assert event.response.secret_entity == secret2.id + assert event.response.entity_name == "test-groupname" + assert event.response.entity_password is None + + def test_fetch_relation_data_secrets_fields(self): + # Set user secret for the relation. + secret = self.harness.charm.app.add_secret( + {"username": "test-username", "password": "test-password"} + ) + + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "version": "v1", + "requests": json.dumps( + [ + { + f"{PROV_SECRET_PREFIX}user": secret.id, + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + ] + ), + }, + ) + # Set some data in the relation. + self.harness.update_relation_data(self.rel_id, self.provider, {"somedata": "somevalue"}) + + # Check the data using the charm library function + # (the diff/data key should not be present). 
+ relation = self.harness.model.get_relation(DATABASE_RELATION_NAME, self.rel_id) + assert relation + repository: OpsRepository = self.harness.charm.requirer.interface.repository( + self.rel_id, relation.app + ) + data = repository.get_data() + assert data + assert data["somedata"] == "somevalue" + assert data.get("version") + assert data.get("requests") + + assert repository.get_field("somedata") == "somevalue" + assert repository.get_field("non-existing") is None + assert ( + repository.get_secret_field("password", SecretGroup("user"), uri=secret.id) + == "test-password" + ) + + @pytest.mark.usefixtures("use_caplog") + @pytest.mark.usefixtures("only_with_juju_secrets") + def test_fetch_my_relation_data_and_fields_secrets(self): + # Set some data in the relation. + self.harness.update_relation_data(self.rel_id, self.app_name, {"somedata": "somevalue"}) + + # Check the data using the charm library function + # (the diff/data key should not be present). + repository: OpsRepository = self.harness.charm.requirer.interface.repository( + self.rel_id, self.harness.charm.app + ) + data = repository.get_data() + assert data == { + "alias": "cluster1", + "somedata": "somevalue", + "version": "v1", + "requests": [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": "data_platform", + "extra-user-roles": "CREATEDB,CREATEROLE", + "external-node-connectivity": False, + }, + { + "salt": "xxxxxxxx", + "request-id": "9ecfabfbb5258f88", + "resource": "", + "entity-type": "USER", + "external-node-connectivity": False, + }, + ], + } + + data = repository.get_field("somedata") + assert data == "somevalue" + + data = repository.get_field("non-existing-data") + assert data is None + + data = repository.get_fields("non-existing-data") + assert data == {} + + self.harness.set_leader(False) + with self._caplog.at_level(logging.ERROR): + assert repository.get_field("somedata") is None + assert ( + "This operation (get_field) can only be performed by the leader unit" 
+ in self._caplog.text + ) + + @patch.object(charm, "_on_endpoints_changed") + def test_on_endpoints_changed(self, _on_endpoints_changed): + """Asserts the correct call to on_endpoints_changed.""" + # Simulate adding endpoints to the relation. + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "endpoints": "host1:port,host2:port", + } + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + # Assert the correct hook is called. + _on_endpoints_changed.assert_called_once() + + # Check that the endpoints are present in the relation + # using the requires charm library event. + event = _on_endpoints_changed.call_args[0][0] + assert event.response.endpoints == "host1:port,host2:port" + + # Reset the mock call count. + _on_endpoints_changed.reset_mock() + + # Set the same data in the relation (no change in the endpoints). + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "endpoints": "host1:port,host2:port", + } + ] + ) + }, + ) + + # Assert the hook was not called again. + _on_endpoints_changed.assert_not_called() + + # Then, change the endpoints in the relation. + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "endpoints": "host1:port,host2:port,host3:port", + } + ] + ) + }, + ) + + # Assert the hook is called now. + _on_endpoints_changed.assert_called_once() + + @patch.object(charm, "_on_read_only_endpoints_changed") + def test_on_read_only_endpoints_changed(self, _on_read_only_endpoints_changed): + """Asserts the correct call to on_read_only_endpoints_changed.""" + # Simulate adding endpoints to the relation. 
+ self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "read-only-endpoints": "host1:port,host2:port", + } + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + # Assert the correct hook is called. + _on_read_only_endpoints_changed.assert_called_once() + + # Check that the endpoints are present in the relation + # using the requires charm library event. + event = _on_read_only_endpoints_changed.call_args[0][0] + assert event.response.read_only_endpoints == "host1:port,host2:port" + + # Reset the mock call count. + _on_read_only_endpoints_changed.reset_mock() + + # Set the same data in the relation (no change in the endpoints). + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "read-only-endpoints": "host1:port,host2:port", + } + ] + ) + }, + ) + + # Assert the hook was not called again. + _on_read_only_endpoints_changed.assert_not_called() + + # Then, change the endpoints in the relation. + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "read-only-endpoints": "host1:port,host2:port,host3:port", + } + ] + ) + }, + ) + + # Assert the hook is called now. 
+ _on_read_only_endpoints_changed.assert_called_once() + + @patch.object(charm, "_on_resource_created") + def test_additional_fields_are_accessible(self, _on_resource_created): + """Asserts additional fields are accessible using the charm library after being set.""" + secret = self.harness.charm.app.add_secret({"tls": "true", "tls-ca": "deadbeef"}) + secret_user = self.harness.charm.app.add_secret( + {"username": "dead", "password": "beef", "uris": "host1:port,host2:port"} + ) + # Simulate setting the additional fields. + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "secret-tls": secret.id, + "secret-user": secret_user.id, + "version": "1.0", + } + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + _on_resource_created.assert_called_once() + event = _on_resource_created.call_args[0][0] + + # Check that the fields are present in the relation + # using the requires charm library. + assert event.response.tls.get_secret_value() is True + assert event.response.tls_ca == "deadbeef" + assert event.response.uris == "host1:port,host2:port" + assert event.response.version == "1.0" + + def test_assign_relation_alias(self): + """Asserts the correct relation alias is assigned to the relation.""" + unit_name = f"{self.app_name}/0" + + # Reset the alias. + self.harness.update_relation_data(self.rel_id, unit_name, {"alias": ""}) + + # Call the function and check the alias. + self.harness.charm.requirer._assign_relation_alias(self.rel_id) + assert ( + self.harness.get_relation_data(self.rel_id, unit_name)["alias"] == CLUSTER_ALIASES[0] + ) + + # Add another relation and check that the second cluster alias was assigned to it. 
+ second_rel_id = self.add_relation(self.harness, "another-database") + + assert ( + self.harness.get_relation_data(second_rel_id, unit_name)["alias"] == CLUSTER_ALIASES[1] + ) + + # Reset the alias and test again using the function call. + self.harness.update_relation_data(second_rel_id, unit_name, {"alias": ""}) + self.harness.charm.requirer._assign_relation_alias(second_rel_id) + assert ( + self.harness.get_relation_data(second_rel_id, unit_name)["alias"] == CLUSTER_ALIASES[1] + ) + + @patch.object(charm, "_on_cluster1_resource_created") + def test_emit_aliased_event(self, _on_cluster1_resource_created): + """Asserts the correct custom event is triggered.""" + # Reset the diff/data key in the relation to correctly emit the event. + self.harness.update_relation_data(self.rel_id, self.app_name, {"data": "{}"}) + + # Check that the event wasn't triggered yet. + _on_cluster1_resource_created.assert_not_called() + + # Call the emit function and assert the desired event is triggered. + relation = self.harness.charm.model.get_relation(DATABASE_RELATION_NAME, self.rel_id) + mock_event = Mock() + mock_event.app = self.harness.charm.model.get_app(self.app_name) + mock_event.unit = self.harness.charm.model.get_unit(f"{self.app_name}/0") + mock_event.relation = relation + response = ResourceProviderModel.model_validate({}) + self.harness.charm.requirer._emit_aliased_event(mock_event, "resource_created", response) + _on_cluster1_resource_created.assert_called_once() + + def test_get_relation_alias(self): + """Asserts the correct relation alias is returned.""" + # Assert the relation got the first cluster alias. + assert self.harness.charm.requirer._get_relation_alias(self.rel_id) == CLUSTER_ALIASES[0] + + @parameterized.expand([(True,), (False,)]) + def test_resource_events(self, is_leader: bool): + # Test custom events creation + # Test that the events are emitted to both the leader + # and the non-leader units through is_leader parameter. 
+ secret_user = self.harness.charm.app.add_secret({"username": "dead", "password": "beef"}) + + self.harness.set_leader(is_leader) + + # Define the events that need to be emitted. + # The event key is the event that should have been emitted + # and the data key is the data that will be updated in the + # relation databag to trigger that event. + events = [ + { + "event": ResourceCreatedEvent, + "data": { + "secret-user": secret_user.id, + "endpoints": "host1:port", + "read-only-endpoints": "host2:port", + }, + }, + { + "event": ResourceEndpointsChangedEvent, + "data": { + "endpoints": "host1:port,host3:port", + "read-only-endpoints": "host2:port,host4:port", + }, + }, + { + "event": ResourceReadOnlyEndpointsChangedEvent, + "data": { + "read-only-endpoints": "host2:port,host4:port,host5:port", + }, + }, + ] + + # Define the list of all events that should be checked + # when something changes in the relation databag. + all_events = [event["event"] for event in events] + + for event in events: + # Diff stored in the data field of the relation databag in the previous event. + # This is important to test the next events in a consistent way. + previous_event_diff = self.harness.get_relation_data( + self.rel_id, f"{self.app_name}/0" + ).get("data", "") + + # Test the event being emitted by the application. + with capture_events(self.harness.charm, *all_events) as captured_events: + self.harness.update_relation_data( + self.rel_id, + self.provider, + { + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + | event["data"] + ] + ), + "data": json.dumps( + { + "c759221a6c14c72a": { + "salt": "kkkkkkk", + "request-id": "c759221a6c14c72a", + "resource": DATABASE, + } + } + ), + }, + ) + + # There are two events (one aliased and the other without alias). + assert len(captured_events) == 2 + + # Check that the events that were emitted are the ones that were expected. 
+ assert all( + isinstance(captured_event, event["event"]) for captured_event in captured_events + ) + + # Test that the remote app name is available in the event. + for captured in captured_events: + assert captured.app.name == self.provider + + # Reset the diff data to trigger the event again later. + self.harness.update_relation_data( + self.rel_id, f"{self.app_name}/0", {"data": previous_event_diff} + ) + + # Test the event being emitted by the unit. + with capture_events(self.harness.charm, *all_events) as captured_events: + self.harness.update_relation_data( + self.rel_id, + f"{self.provider}/0", + { + "version": "v1", + "requests": json.dumps( + [ + { + "salt": "kkkkkkkk", + "request-id": "c759221a6c14c72a", + } + | event["data"] + ] + ), + }, + ) + + # There are two events (one aliased and the other without alias). + assert len(captured_events) == 2 + + # Check that the events that were emitted are the ones that were expected. + assert all( + isinstance(captured_event, event["event"]) for captured_event in captured_events + ) + + # Test that the remote unit name is available in the event. + for captured in captured_events: + assert captured.unit.name == f"{self.provider}/0" From 1927facd2356d04904465953a1017c8b3a0b981c Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 5 Sep 2025 17:31:22 +0200 Subject: [PATCH 02/34] # This is a combination of 7 commits. 
# This is the 1st commit message: feat: DPV1 # The commit message #2 will be skipped: # feat: unit tests + linting # The commit message #3 will be skipped: # fix: linting # The commit message #4 will be skipped: # fix: enable tests # The commit message #5 will be skipped: # fix: only correct tests # The commit message #6 will be skipped: # fix: application deployed # The commit message #7 will be skipped: # fix: build charms --- .github/workflows/ci.yaml | 75 +- .../data_platform_libs/v1/data_interfaces.py | 179 +- pyproject.toml | 2 +- requirements/v1/requirements.txt | 2 +- tests/{ => v0}/conftest.py | 0 tests/v1/conftest.py | 78 + tests/v1/integration/__init__.py | 2 + .../application-charm/actions.yaml | 46 + .../application-charm/charmcraft.yaml | 96 ++ .../lib/charms/data_platform_libs/v1/.gitkeep | 0 .../application-charm/metadata.yaml | 35 + .../integration/application-charm/poetry.lock | 341 ++++ .../application-charm/pyproject.toml | 20 + .../application-charm/src/charm.py | 492 ++++++ .../charmcraft.yaml | 84 + .../metadata.yaml | 12 + .../backward-compatibility-charm/poetry.lock | 341 ++++ .../pyproject.toml | 18 + .../backward-compatibility-charm/src/charm.py | 49 + tests/v1/integration/conftest.py | 215 +++ .../integration/database-charm/actions.yaml | 140 ++ .../database-charm/charmcraft.yaml | 95 ++ .../lib/charms/data_platform_libs/v1/.gitkeep | 0 .../integration/database-charm/metadata.yaml | 34 + .../v1/integration/database-charm/poetry.lock | 361 ++++ .../integration/database-charm/pyproject.toml | 20 + .../integration/database-charm/src/charm.py | 465 +++++ .../dummy-database-charm/actions.yaml | 126 ++ .../dummy-database-charm/charmcraft.yaml | 95 ++ .../lib/charms/data_platform_libs/v1/.gitkeep | 0 .../dummy-database-charm/metadata.yaml | 34 + .../dummy-database-charm/poetry.lock | 361 ++++ .../dummy-database-charm/pyproject.toml | 20 + .../dummy-database-charm/src/charm.py | 222 +++ tests/v1/integration/helpers.py | 314 ++++ 
tests/v1/integration/kafka-charm/actions.yaml | 25 + .../integration/kafka-charm/charmcraft.yaml | 84 + .../lib/charms/data_platform_libs/v1/.gitkeep | 0 .../v1/integration/kafka-charm/metadata.yaml | 16 + .../v1/integration/kafka-charm/pyproject.toml | 18 + tests/v1/integration/kafka-charm/src/charm.py | 230 +++ .../kafka-connect-charm/actions.yaml | 13 + .../kafka-connect-charm/charmcraft.yaml | 84 + .../lib/charms/data_platform_libs/v1/.gitkeep | 0 .../kafka-connect-charm/metadata.yaml | 16 + .../kafka-connect-charm/pyproject.toml | 18 + .../kafka-connect-charm/src/charm.py | 165 ++ .../integration/opensearch-charm/actions.yaml | 4 + .../opensearch-charm/charmcraft.yaml | 84 + .../lib/charms/data_platform_libs/v1/.gitkeep | 0 .../opensearch-charm/metadata.yaml | 16 + .../opensearch-charm/pyproject.toml | 18 + .../integration/opensearch-charm/src/charm.py | 165 ++ .../test_backward_compatibility_charm.py | 90 + tests/v1/integration/test_charm.py | 1502 +++++++++++++++++ tests/v1/integration/test_kafka_charm.py | 220 +++ .../integration/test_kafka_connect_charm.py | 163 ++ tests/v1/integration/test_opensearch_charm.py | 142 ++ tests/v1/unit/test_data_interfaces.py | 14 +- tox.ini | 67 +- 60 files changed, 7462 insertions(+), 66 deletions(-) rename tests/{ => v0}/conftest.py (100%) create mode 100644 tests/v1/conftest.py create mode 100644 tests/v1/integration/__init__.py create mode 100644 tests/v1/integration/application-charm/actions.yaml create mode 100644 tests/v1/integration/application-charm/charmcraft.yaml create mode 100644 tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/.gitkeep create mode 100644 tests/v1/integration/application-charm/metadata.yaml create mode 100644 tests/v1/integration/application-charm/poetry.lock create mode 100644 tests/v1/integration/application-charm/pyproject.toml create mode 100755 tests/v1/integration/application-charm/src/charm.py create mode 100644 
tests/v1/integration/backward-compatibility-charm/charmcraft.yaml create mode 100644 tests/v1/integration/backward-compatibility-charm/metadata.yaml create mode 100644 tests/v1/integration/backward-compatibility-charm/poetry.lock create mode 100644 tests/v1/integration/backward-compatibility-charm/pyproject.toml create mode 100755 tests/v1/integration/backward-compatibility-charm/src/charm.py create mode 100644 tests/v1/integration/conftest.py create mode 100644 tests/v1/integration/database-charm/actions.yaml create mode 100644 tests/v1/integration/database-charm/charmcraft.yaml create mode 100644 tests/v1/integration/database-charm/lib/charms/data_platform_libs/v1/.gitkeep create mode 100644 tests/v1/integration/database-charm/metadata.yaml create mode 100644 tests/v1/integration/database-charm/poetry.lock create mode 100644 tests/v1/integration/database-charm/pyproject.toml create mode 100755 tests/v1/integration/database-charm/src/charm.py create mode 100644 tests/v1/integration/dummy-database-charm/actions.yaml create mode 100644 tests/v1/integration/dummy-database-charm/charmcraft.yaml create mode 100644 tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/.gitkeep create mode 100644 tests/v1/integration/dummy-database-charm/metadata.yaml create mode 100644 tests/v1/integration/dummy-database-charm/poetry.lock create mode 100644 tests/v1/integration/dummy-database-charm/pyproject.toml create mode 100755 tests/v1/integration/dummy-database-charm/src/charm.py create mode 100644 tests/v1/integration/helpers.py create mode 100644 tests/v1/integration/kafka-charm/actions.yaml create mode 100644 tests/v1/integration/kafka-charm/charmcraft.yaml create mode 100644 tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/.gitkeep create mode 100644 tests/v1/integration/kafka-charm/metadata.yaml create mode 100644 tests/v1/integration/kafka-charm/pyproject.toml create mode 100755 tests/v1/integration/kafka-charm/src/charm.py create mode 
100644 tests/v1/integration/kafka-connect-charm/actions.yaml create mode 100644 tests/v1/integration/kafka-connect-charm/charmcraft.yaml create mode 100644 tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/.gitkeep create mode 100644 tests/v1/integration/kafka-connect-charm/metadata.yaml create mode 100644 tests/v1/integration/kafka-connect-charm/pyproject.toml create mode 100755 tests/v1/integration/kafka-connect-charm/src/charm.py create mode 100644 tests/v1/integration/opensearch-charm/actions.yaml create mode 100644 tests/v1/integration/opensearch-charm/charmcraft.yaml create mode 100644 tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/.gitkeep create mode 100644 tests/v1/integration/opensearch-charm/metadata.yaml create mode 100644 tests/v1/integration/opensearch-charm/pyproject.toml create mode 100755 tests/v1/integration/opensearch-charm/src/charm.py create mode 100644 tests/v1/integration/test_backward_compatibility_charm.py create mode 100644 tests/v1/integration/test_charm.py create mode 100644 tests/v1/integration/test_kafka_charm.py create mode 100644 tests/v1/integration/test_kafka_connect_charm.py create mode 100644 tests/v1/integration/test_opensearch_charm.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e4c5ffec..850972b6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -40,6 +40,9 @@ jobs: juju-version: - libjuju-version: "2.9.49.1" - libjuju-version: "3.6.1.0" + exclude: + - libs-version: 1 + juju-version: {libjuju-version: "2.9.49.1"} steps: - name: Checkout uses: actions/checkout@v4 @@ -51,7 +54,7 @@ jobs: env: LIBJUJU_VERSION_SPECIFIER: "==${{ matrix.juju-version.libjuju-version }}" - integration-test: + integration-test-v0: strategy: fail-fast: false matrix: @@ -155,3 +158,73 @@ jobs: with: app: data-platform-libs model: testing + + integration-test-v1: + strategy: + fail-fast: false + matrix: + ubuntu-versions: + # Update whenever 
charmcraft.yaml is changed + - series: jammy + bases-index: 0 + - series: noble + bases-index: 1 + tox-environments: + - integration-db-v1 + - integration-opensearch-v1 + - integration-kafka-v1 + - integration-kafka-connect-v1 + - integration-backward-compatibility-v1 + juju-version: + - juju-bootstrap-option: "3.6.1" + juju-snap-channel: "3.6/stable" + libjuju-version: "3.6.1.0" + name: V1 -- ${{ matrix.tox-environments }} Juju ${{ matrix.juju-version.juju-snap-channel}} -- ${{ matrix.ubuntu-versions.series }} + needs: + - lint + - unit-test + runs-on: ubuntu-latest + timeout-minutes: 120 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup operator environment + # TODO: Replace with custom image on self-hosted runner + uses: charmed-kubernetes/actions-operator@main + with: + provider: microk8s + channel: "1.27-strict/stable" + bootstrap-options: "--agent-version ${{ matrix.juju-version.juju-bootstrap-option }}" + juju-channel: ${{ matrix.juju-version.juju-snap-channel }} + charmcraft-channel: "3.x/stable" + - name: Download packed charm(s) + uses: actions/download-artifact@v4 + with: + name: ${{ needs.build.outputs.artifact-name }} + - name: Select tests + id: select-tests + run: | + if [ "${{ github.event_name }}" == "schedule" ] + then + echo Running unstable and stable tests + echo "mark_expression=" >> $GITHUB_OUTPUT + else + echo Skipping unstable tests + echo "mark_expression=not unstable" >> $GITHUB_OUTPUT + fi + - name: Run integration tests + # set a predictable model name so it can be consumed by charm-logdump-action + run: tox run -e ${{ matrix.tox-environments }} -- -m '${{ steps.select-tests.outputs.mark_expression }}' --model testing --os-series=${{ matrix.ubuntu-versions.series }} --build-bases-index=${{ matrix.ubuntu-versions.bases-index }} + env: + CI_PACKED_CHARMS: ${{ needs.build.outputs.charms }} + LIBJUJU_VERSION_SPECIFIER: "==${{ matrix.juju-version.libjuju-version }}" + WEBSOCKETS_VERSION_SPECIFIER: ${{ 
env.WEBSOCKETS_VERSION_SPECIFIER }} + - name: Print debug-log + if: failure() + run: juju switch testing; juju debug-log --replay --no-tail + - name: Dump logs + uses: canonical/charm-logdump-action@main + if: failure() + with: + app: data-platform-libs + model: testing diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index dd16934f..b49947f7 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -170,7 +170,7 @@ def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: # Create configuration file for app config_file = self._render_app_config_file( - event.respones.username, + event.response.username, event.response.password, event.response.endpoints, ) @@ -286,6 +286,11 @@ def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: from pydantic_core import CoreSchema, core_schema from typing_extensions import TypeAliasType, override +try: + import psycopg +except ImportError: + psycopg = None + # The unique Charmhub library identifier, never change it LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" @@ -693,31 +698,36 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ if not secret_group: raise SecretsUnavailableError(field) - if (value := getattr(self, field)) is None: - continue - aliased_field = field_info.serialization_alias or field secret = repository.get_secret(secret_group, secret_uri=None) + + value = getattr(self, field) + actual_value = ( value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - if secret: - content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - else: - secret = repository.add_secret( - aliased_field, - actual_value, - 
secret_group, - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") + if secret is None: + if actual_value: + secret = repository.add_secret( + aliased_field, + actual_value, + secret_group, + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + continue + content = secret.get_content() + full_content = copy.deepcopy(content) + + if actual_value is None: + full_content.pop(field, None) + else: + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) return handler(self) @@ -783,6 +793,7 @@ def extract_secrets(self, info: ValidationInfo): @model_serializer(mode="wrap") def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): """Serializes the model writing the secrets in their respective secrets.""" + _encountered_secrets: set[tuple[CachedSecret, str]] = set() if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): logger.debug("No secret parsing serialization as we're lacking context here.") return handler(self) @@ -797,8 +808,6 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ secret_group = field_info.metadata[0] if not secret_group: raise SecretsUnavailableError(field) - if (value := getattr(self, field)) is None: - continue aliased_field = field_info.serialization_alias or field secret_field = repository.secret_field(secret_group, aliased_field).replace( "-", "_" @@ -807,24 +816,41 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ secret = repository.get_secret( secret_group, secret_uri=secret_uri, short_uuid=short_uuid ) + + value = getattr(self, field) + actual_value = ( value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - if secret: - 
content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) + if secret is None: + if actual_value: + secret = repository.add_secret( + aliased_field, actual_value, secret_group, short_uuid + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + setattr(self, secret_field, secret.meta.id) + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if actual_value is None: + full_content.pop(field, None) + _encountered_secrets.add((secret, secret_field)) else: - secret = repository.add_secret( - aliased_field, actual_value, secret_group, short_uuid - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - setattr(self, secret_field, secret.meta.id) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + + # Delete all empty secrets and clean up their fields. + for secret, secret_field in _encountered_secrets: + if not secret.get_content(): + # Setting a field to '' deletes it + setattr(self, secret_field, "") + repository.delete_secret(secret.label) return handler(self) @@ -1044,7 +1070,7 @@ def write_fields(self, mapping: dict[str, Any]) -> None: ... def write_secret_field( - self, field: str, value: Any, group: SecretGroup, uri_to_databag: bool = False + self, field: str, value: Any, group: SecretGroup ) -> CachedSecret | None: """Writes a secret field.""" ... @@ -1060,6 +1086,11 @@ def add_secret( """Gets a value for a field stored in a secret group.""" ... + @abstractmethod + def delete_secret(self, label: str): + """Deletes a secret by its label.""" + ... 
+ @abstractmethod def delete_field(self, field: str) -> None: """Deletes a field.""" @@ -1390,6 +1421,11 @@ def add_secret( return secret + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + @final class OpsRelationRepository(OpsRepository): @@ -1841,13 +1877,11 @@ class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): This class defines the events that the database can emit. """ - bulk_resources_requested = EventSource(BulkResourcesRequestedEvent[TRequirerCommonModel]) - resource_requested = EventSource(ResourceRequestedEvent[TRequirerCommonModel]) - resource_entity_requested = EventSource(ResourceEntityRequestedEvent[TRequirerCommonModel]) - resource_entity_permissions_changed = EventSource( - ResourceEntityPermissionsChangedEvent[TRequirerCommonModel] - ) - mtls_cert_updated = EventSource(MtlsCertUpdatedEvent[TRequirerCommonModel]) + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): @@ -1934,12 +1968,10 @@ class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): This class defines the events that the database can emit. 
""" - resource_created = EventSource(ResourceCreatedEvent[TResourceProviderModel]) - resource_entity_created = EventSource(ResourceEntityCreatedEvent[TResourceProviderModel]) - endpoints_changed = EventSource(ResourceEndpointsChangedEvent[TResourceProviderModel]) - read_only_endpoints_changed = EventSource( - ResourceReadOnlyEndpointsChangedEvent[TResourceProviderModel] - ) + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) ############################################################################## @@ -2027,7 +2059,6 @@ def compute_diff( new_data = request.model_dump( mode="json", exclude={"data"}, - context={"repository": repository}, exclude_none=True, exclude_defaults=True, ) @@ -2154,7 +2185,7 @@ def _handle_bulk_event( This allows for the developer to process the diff and store it themselves """ for request in request_model.requests: - # Compute the diff withtout storing it so we can validate the diffs. + # Compute the diff without storing it so we can validate the diffs. _diff = self.compute_diff(event.relation, request, repository, store=False) self._validate_diff(event, _diff) @@ -2435,6 +2466,54 @@ def are_all_resources_created(self, rel_id: int) -> bool: if request.request_id ) + @staticmethod + def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: + # Actual checking method. + # No need to check for psycopg here, it's been checked before. 
+ if not psycopg: + return False + + try: + with psycopg.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", + str(e), + ) + return False + + def is_postgresql_plugin_enabled(self, plugin: str, relation_id: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_id: Optional index to check the database (default: 0 - first relation). + """ + if not psycopg: + return False + + # Can't check a non existing relation. + if len(self.relations) <= relation_id: + return False + + relation_id = self.relations[relation_id].id + model = self.interface.build_model(relation_id=relation_id) + for request in model.requests: + if request.endpoints and request.username and request.password: + host = request.endpoints.split(":")[0] + username = request.username.get_secret_value() + password = request.password.get_secret_value() + + connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" + return self._is_pg_plugin_enabled(plugin, connection_string) + logger.info("No valid request to use to check for plugin.") + return False + ############################################################################## # Helpers for aliases ############################################################################## diff --git a/pyproject.toml b/pyproject.toml index 930fe8e4..3098bcc2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ ignore = ["E501", "D107"] per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104", "E999"]} [tool.ruff.lint.mccabe] -max-complexity = 12 +max-complexity = 13 [tool.pyright] include = ["src", "lib"] diff --git 
a/requirements/v1/requirements.txt b/requirements/v1/requirements.txt index b2c85c68..d443bd02 100644 --- a/requirements/v1/requirements.txt +++ b/requirements/v1/requirements.txt @@ -1,2 +1,2 @@ ops >= 2.1.1 -pydantic>=2,<3 +pydantic>=2.11,<3 diff --git a/tests/conftest.py b/tests/v0/conftest.py similarity index 100% rename from tests/conftest.py rename to tests/v0/conftest.py diff --git a/tests/v1/conftest.py b/tests/v1/conftest.py new file mode 100644 index 00000000..c7caaf70 --- /dev/null +++ b/tests/v1/conftest.py @@ -0,0 +1,78 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. +import argparse +import os +from importlib.metadata import version +from unittest.mock import PropertyMock + +import pytest +from ops import JujuVersion +from pytest_mock import MockerFixture + + +def pytest_addoption(parser): + parser.addoption( + "--os-series", help="Ubuntu series for dp libs charm (e.g. jammy)", default="jammy" + ) + parser.addoption( + "--build-bases-index", + type=int, + help="Index of charmcraft.yaml base that matches --os-series", + default=0, + ) + + +def pytest_configure(config): + if (config.option.os_series is None) ^ (config.option.build_bases_index is None): + raise argparse.ArgumentError( + None, + "--os-series and --build-bases-index must be given together", + ) + # Note: Update defaults whenever charmcraft.yaml is changed + valid_combinations = [(0, "jammy"), (1, "noble")] + if (config.option.build_bases_index, config.option.os_series) not in valid_combinations: + raise argparse.ArgumentError( + None, f"Only base index combinations {valid_combinations} are accepted." + ) + + +@pytest.fixture(autouse=True) +def juju_has_secrets(mocker: MockerFixture): + """This fixture will force the usage of secrets whenever run on Juju 3.x. + + NOTE: This is needed, as normally JujuVersion is set to 0.0.0 in tests + (i.e. 
not the real juju version) + """ + if juju_version := os.environ.get("LIBJUJU_VERSION_SPECIFIER"): + juju_version.replace("==", "") + juju_version = juju_version[2:].split(".")[0] + else: + juju_version = version("juju") + + if juju_version < "3": + mocker.patch.object(JujuVersion, "has_secrets", new_callable=PropertyMock).return_value = ( + False + ) + return False + else: + mocker.patch.object(JujuVersion, "has_secrets", new_callable=PropertyMock).return_value = ( + True + ) + return True + + +@pytest.fixture +def only_with_juju_secrets(juju_has_secrets): + """Pretty way to skip Juju 3 tests.""" + if not juju_has_secrets: + pytest.skip("Secrets test only applies on Juju 3.x") + + +@pytest.fixture +def only_without_juju_secrets(juju_has_secrets): + """Pretty way to skip Juju 2-specific tests. + + Typically: to save CI time, when the same check were executed in a Juju 3-specific way already + """ + if juju_has_secrets: + pytest.skip("Skipping legacy secrets tests") diff --git a/tests/v1/integration/__init__.py b/tests/v1/integration/__init__.py new file mode 100644 index 00000000..db3bfe1a --- /dev/null +++ b/tests/v1/integration/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/tests/v1/integration/application-charm/actions.yaml b/tests/v1/integration/application-charm/actions.yaml new file mode 100644 index 00000000..fbba614f --- /dev/null +++ b/tests/v1/integration/application-charm/actions.yaml @@ -0,0 +1,46 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. +get-plugin-status: + description: Get plugin status (enabled/disabled). + params: + plugin: + type: string + description: The plugin to check the status. 
+
+reset-unit-status:
+  description: Set empty status message (ActiveStatus)
+
+get-relation-field:
+  description: Get fields from the second-database relation
+  field:
+    type: string
+    description: Relation field
+
+get-relation-self-side-field:
+  description: Set fields from the relation
+  params:
+    relation_id:
+      type: integer
+      description: The relation's unique ID
+    field:
+      type: string
+      description: Relation field
+
+set-relation-field:
+  description: Set fields from the second-database relation
+  field:
+    type: string
+    description: Relation field
+  value:
+    type: string
+    description: Value of the field to set
+
+delete-relation-field:
+  description: Delete fields from the second-database relation
+  field:
+    type: string
+    description: Relation field
+
+set-mtls-cert:
+  description: Sets the MTLS cert for the requirer application.
+ \ No newline at end of file
diff --git a/tests/v1/integration/application-charm/charmcraft.yaml b/tests/v1/integration/application-charm/charmcraft.yaml new file mode 100644 index 00000000..97a4b3c2 --- /dev/null +++ b/tests/v1/integration/application-charm/charmcraft.yaml @@ -0,0 +1,96 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +type: charm +# Whenever "bases" is changed: +# - Update tests/integration/conftest.py::pytest_configure() +# - Update .github/workflow/ci.yaml integration-test matrix +platforms: + ubuntu@22.04:amd64: + ubuntu@24.04:amd64: + +# Files implicitly created by charmcraft without a part: +# - dispatch (https://github.com/canonical/charmcraft/pull/1898) +# - manifest.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259) +# Files implicitly copied/"staged" by charmcraft without a part: +# - actions.yaml, config.yaml, metadata.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293 +# https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157) +parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + # Use environment variable instead of `--break-system-packages` to avoid failing on older + # versions of pip that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + # poetry 2.0.0 requires Python >=3.9 + if ! 
"$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + - libpq-dev + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. 
Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" + + libpq: + build-packages: + - libpq-dev + plugin: dump + source: /usr/lib/ + source-type: local + stage: + - lib/ + organize: + "*-linux-gnu/libpq.so*": lib/ diff --git a/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/.gitkeep b/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/application-charm/metadata.yaml b/tests/v1/integration/application-charm/metadata.yaml new file mode 100644 index 00000000..983b8301 --- /dev/null +++ b/tests/v1/integration/application-charm/metadata.yaml @@ -0,0 +1,35 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. +name: application +description: | + Data platform libs application charm used in integration tests. +summary: | + Data platform libs application meant to be used + only for testing of the libs in this repository. 
+ +requires: + first-database-db: + interface: database_client + first-database-roles: + interface: database_client + second-database-db: + interface: database_client + multiple-database-clusters: + interface: database_client + aliased-multiple-database-clusters: + interface: database_client + limit: 2 + kafka-client-topic: + interface: kafka_client + kafka-client-roles: + interface: kafka_client + kafka-split-pattern-client: + interface: kafka_client + opensearch-client-index: + interface: opensearch_client + opensearch-client-roles: + interface: opensearch_client + connect-source: + interface: connect_client + connect-sink: + interface: connect_client diff --git a/tests/v1/integration/application-charm/poetry.lock b/tests/v1/integration/application-charm/poetry.lock new file mode 100644 index 00000000..b6f8b787 --- /dev/null +++ b/tests/v1/integration/application-charm/poetry.lock @@ -0,0 +1,341 @@ +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "opentelemetry-api" +version = "1.36.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "opentelemetry_api-1.36.0-py3-none-any.whl", hash = "sha256:02f20bcacf666e1333b6b1f04e647dc1d5111f86b8e510238fcc56d7762cda8c"}, + {file = "opentelemetry_api-1.36.0.tar.gz", hash = "sha256:9a72572b9c416d004d492cbc6e61962c0501eaf945ece9b5a0f56597d8348aa0"}, +] + 
+[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "ops" +version = "2.23.1" +description = "The Python library behind great charms" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "ops-2.23.1-py3-none-any.whl", hash = "sha256:fdf58163beafd25180c12a4c7efaf1e76e5f8710508a97840c07055bb78b0c77"}, + {file = "ops-2.23.1.tar.gz", hash = "sha256:aecacd67ef7ca913f63f397e0330bfa93d70529a3ef71ed2d99e2bc232564ae3"}, +] + +[package.dependencies] +importlib-metadata = "*" +opentelemetry-api = ">=1.0,<2.0" +PyYAML = "==6.*" +websocket-client = "==1.*" + +[package.extras] +testing = ["ops-scenario (==7.23.1)"] +tracing = ["ops-tracing (==2.23.1)"] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = 
"pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = 
["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.10.12" +content-hash = "3e90339ce16375daef4d9549ee45a773e9691caf1f8e2141decd3454017036e5" diff --git a/tests/v1/integration/application-charm/pyproject.toml b/tests/v1/integration/application-charm/pyproject.toml new file mode 100644 index 00000000..eb122466 --- /dev/null +++ b/tests/v1/integration/application-charm/pyproject.toml @@ -0,0 +1,20 @@ +[tool.poetry] +package-mode = false +requires-poetry = ">=2.0.0" + +[tool.poetry.dependencies] +python = "^3.10.12" +ops = ">=2.0.0,<3.0.0" +pydantic = ">=2.11" +psycopg2 = "^2.9.10" + +[tool.poetry.group.charm-libs.dependencies] +ops = ">=2.0.0" +pydantic = ">=2.11" +psycopg2 = "^2.9.10" + +[tool.poetry.requires-plugins] +poetry-plugin-export = ">=1.8" + +[build-system] +build-backend = "poetry.core.masonry.api" diff --git a/tests/v1/integration/application-charm/src/charm.py 
b/tests/v1/integration/application-charm/src/charm.py new file mode 100755 index 00000000..d6c4fded --- /dev/null +++ b/tests/v1/integration/application-charm/src/charm.py @@ -0,0 +1,492 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Application charm that connects to database charms. + +This charm is meant to be used only for testing +of the libraries in this repository. +""" + +import logging +import subprocess + +from ops import Relation +from ops.charm import ActionEvent, CharmBase +from ops.main import main +from ops.model import ActiveStatus +from pydantic import Field, SecretStr + +from charms.data_platform_libs.v1.data_interfaces import ( + ExtraSecretStr, + KafkaRequestModel, + KafkaResponseModel, + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEndpointsChangedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +logger = logging.getLogger(__name__) + +# Extra roles that this application needs when interacting with the database. +EXTRA_USER_ROLES = "SUPERUSER" +EXTRA_USER_ROLES_KAFKA = "producer,consumer" +EXTRA_USER_ROLES_OPENSEARCH = "admin,default" +CONSUMER_GROUP_PREFIX = "test-prefix" +BAD_URL = "http://badurl" + + +class ExtendedResponseModel(ResourceProviderModel): + topsecret: ExtraSecretStr = Field(default=None) + donttellanyone: ExtraSecretStr = Field(default=None) + + +class ApplicationCharm(CharmBase): + """Application charm that connects to database charms.""" + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + # self.framework.observe(self.on.get_plugin_status_action, self._on_get_plugin_status) + + # Events related to the first database that is requested + # (these events are defined in the database requires charm library). 
+ database_name = f"{self.app.name.replace('-', '_')}_first_database_db" + self.first_database = ResourceRequirerEventHandler( + charm=self, + relation_name="first-database-db", + requests=[ + RequirerCommonModel(resource=database_name, extra_user_roles=EXTRA_USER_ROLES) + ], + response_model=ResourceProviderModel, + ) + self.first_database_roles = ResourceRequirerEventHandler( + self, + "first_database-roles", + requests=[ + RequirerCommonModel( + resource=database_name, entity_type="USER", extra_user_roles=EXTRA_USER_ROLES + ) + ], + response_model=ResourceProviderModel, + ) + self.framework.observe( + self.first_database.on.resource_created, self._on_first_database_created + ) + self.framework.observe( + self.first_database.on.endpoints_changed, self._on_first_database_endpoints_changed + ) + self.framework.observe( + self.first_database_roles.on.resource_entity_created, + self._on_first_database_entity_created, + ) + + # Events related to the second database that is requested + # (these events are defined in the database requires charm library). + database_name = f"{self.app.name.replace('-', '_')}_second_database_db" + + # Keeping the charm backwards compatible, for upgrades testing + self.second_database = ResourceRequirerEventHandler( + charm=self, + relation_name="second-database-db", + requests=[ + RequirerCommonModel( + resource=database_name, + extra_user_roles=EXTRA_USER_ROLES, + external_node_connectivity=True, + ) + ], + response_model=ExtendedResponseModel, + ) + + self.framework.observe( + self.second_database.on.resource_created, self._on_second_database_created + ) + self.framework.observe( + self.second_database.on.endpoints_changed, self._on_second_database_endpoints_changed + ) + + # Multiple database clusters charm events (clusters/relations without alias). 
+ database_name = f"{self.app.name.replace('-', '_')}_multiple_database_clusters" + self.database_clusters = ResourceRequirerEventHandler( + charm=self, + relation_name="multiple-database-clusters", + requests=[ + RequirerCommonModel(resource=database_name, extra_user_roles=EXTRA_USER_ROLES) + ], + response_model=ResourceProviderModel, + ) + self.framework.observe( + self.database_clusters.on.resource_created, self._on_cluster_database_created + ) + self.framework.observe( + self.database_clusters.on.endpoints_changed, + self._on_cluster_endpoints_changed, + ) + + # Multiple database clusters charm events (defined dynamically + # in the database requires charm library, using the provided cluster/relation aliases). + database_name = f"{self.app.name.replace('-', '_')}_aliased_multiple_database_clusters" + cluster_aliases = ["cluster1", "cluster2"] # Aliases for the multiple clusters/relations. + self.aliased_database_clusters = ResourceRequirerEventHandler( + charm=self, + relation_name="aliased-multiple-database-clusters", + requests=[ + RequirerCommonModel(resource=database_name, extra_user_roles=EXTRA_USER_ROLES) + ], + response_model=ResourceProviderModel, + relation_aliases=cluster_aliases, + ) + # Each database cluster will have its own events + # with the name having the cluster/relation alias as the prefix. 
+ self.framework.observe( + self.aliased_database_clusters.on.cluster1_resource_created, + self._on_cluster1_database_created, + ) + self.framework.observe( + self.aliased_database_clusters.on.cluster1_endpoints_changed, + self._on_cluster1_endpoints_changed, + ) + self.framework.observe( + self.aliased_database_clusters.on.cluster2_resource_created, + self._on_cluster2_database_created, + ) + self.framework.observe( + self.aliased_database_clusters.on.cluster2_endpoints_changed, + self._on_cluster2_endpoints_changed, + ) + + # Kafka events + + self.kafka = ResourceRequirerEventHandler( + charm=self, + relation_name="kafka-client-topic", + requests=[ + KafkaRequestModel( + resource="test-topic", + extra_user_roles=EXTRA_USER_ROLES_KAFKA, + consumer_group_prefix=CONSUMER_GROUP_PREFIX, + ) + ], + response_model=KafkaResponseModel, + ) + + self.kafka_split_pattern = ResourceRequirerEventHandler( + self, + relation_name="kafka-split-pattern-client", + requests=[ + KafkaRequestModel( + resource="test-topic-split-pattern", + extra_user_roles=EXTRA_USER_ROLES_KAFKA, + consumer_group_prefix=CONSUMER_GROUP_PREFIX, + ) + ], + response_model=KafkaResponseModel, + ) + self.framework.observe( + self.kafka_split_pattern.on.endpoints_changed, + self._on_kafka_bootstrap_server_changed, + ) + self.framework.observe( + self.kafka_split_pattern.on.resource_created, self._on_kafka_topic_created + ) + self.kafka_roles = ResourceRequirerEventHandler( + charm=self, + relation_name="kafka-client-roles", + requests=[ + KafkaRequestModel( + resource="test-topic", + entity_type="USER", + extra_user_roles=EXTRA_USER_ROLES_KAFKA, + consumer_group_prefix=CONSUMER_GROUP_PREFIX, + ) + ], + response_model=KafkaResponseModel, + ) + self.framework.observe( + self.kafka_roles.on.resource_entity_created, + self._on_kafka_entity_created, + ) + + self.framework.observe( + self.kafka.on.endpoints_changed, self._on_kafka_bootstrap_server_changed + ) + self.framework.observe(self.kafka.on.resource_created, 
self._on_kafka_topic_created) + + # Kafka Connect events + + self.connect_source = ResourceRequirerEventHandler( + self, + "connect-source", + requests=[RequirerCommonModel(resource="http://10.10.10.10:8000")], + response_model=ResourceProviderModel, + ) + self.connect_sink = ResourceRequirerEventHandler( + self, + "connect-sink", + requests=[RequirerCommonModel(resource=BAD_URL)], + response_model=ResourceProviderModel, + ) + + self.framework.observe( + self.connect_source.on.resource_created, self._on_connect_integration_created + ) + + self.framework.observe( + self.connect_source.on.endpoints_changed, + self._on_connect_endpoints_changed, + ) + + # OpenSearch events + + self.opensearch = ResourceRequirerEventHandler( + charm=self, + relation_name="opensearch-client-index", + requests=[ + RequirerCommonModel( + resource="test-index", extra_user_roles=EXTRA_USER_ROLES_OPENSEARCH + ) + ], + response_model=ResourceProviderModel, + ) + self.framework.observe( + self.opensearch.on.resource_created, self._on_opensearch_index_created + ) + # TODO: investigate authentication updated. 
+ # self.framework.observe( + # self.opensearch.on.authentication_updated, self._on_opensearch_authentication_updated + # ) + + self.opensearch_roles = ResourceRequirerEventHandler( + charm=self, + relation_name="opensearch-client-roles", + requests=[ + RequirerCommonModel( + resource="test-index", + entity_type="USER", + extra_user_roles=EXTRA_USER_ROLES_OPENSEARCH, + ) + ], + response_model=ResourceProviderModel, + ) + self.framework.observe( + self.opensearch_roles.on.resource_entity_created, + self._on_opensearch_entity_created, + ) + + # actions + + self.framework.observe(self.on.reset_unit_status_action, self._on_reset_unit_status) + self.framework.observe(self.on.set_mtls_cert_action, self._on_set_mtls_cert) + + # Get/set/delete fields on second-database relations + self.framework.observe(self.on.get_relation_field_action, self._on_get_relation_field) + self.framework.observe( + self.on.get_relation_self_side_field_action, self._on_get_relation_self_side_field + ) + self.framework.observe(self.on.set_relation_field_action, self._on_set_relation_field) + self.framework.observe( + self.on.delete_relation_field_action, self._on_delete_relation_field + ) + self._relation_endpoints: list[ResourceRequirerEventHandler] = [ + self.first_database, + self.second_database, + self.database_clusters, + self.aliased_database_clusters, + self.kafka, + self.kafka_roles, + self.connect_source, + self.connect_sink, + self.opensearch, + self.opensearch_roles, + ] + + def _get_relation(self, relation_id: int) -> tuple[ResourceRequirerEventHandler, Relation]: + """Retrieve a relation by ID, together with the corresponding endpoint object ('Requires').""" + for source in self._relation_endpoints: + for relation in source.relations: + if relation.id == relation_id: + return (source, relation) + raise ValueError(f"Invalid relation id {relation_id}") + + def _on_start(self, _) -> None: + """Only sets an Active status.""" + self.unit.status = ActiveStatus() + + # Generic relation 
actions + def _on_get_relation_field(self, event: ActionEvent): + """Get requested relation field (OTHER side).""" + source, relation = self._get_relation(event.params["relation_id"]) + value = source.interface.repository(relation.id, relation.app).get_field( + event.params["field"] + ) + event.set_results({"value": value if value else ""}) + + def _on_get_relation_self_side_field(self, event: ActionEvent): + """Get requested relation field (OTHER side).""" + source, relation = self._get_relation(event.params["relation_id"]) + value = source.interface.repository(relation.id).get_field(event.params["field"]) + event.set_results({"value": value if value else ""}) + + def _on_set_relation_field(self, event: ActionEvent): + """Set requested relation field on self-side (that's the only one writeable).""" + source, relation = self._get_relation(event.params["relation_id"]) + source.interface.repository(relation.id).write_field( + event.params["field"], event.params["value"] + ) + + def _on_delete_relation_field(self, event: ActionEvent): + """Delete requested relation field on self-side (that's the only one writeable).""" + source, relation = self._get_relation(event.params["relation_id"]) + source.interface.repository(relation.id).delete_field(event.params["field"]) + + # First database events observers. + def _on_first_database_created(self, event: ResourceCreatedEvent) -> None: + """Event triggered when a database was created for this application.""" + # Retrieve the credentials using the charm library. 
+ logger.info( + f"first database credentials: {event.response.username} {event.response.password}" + ) + self.unit.status = ActiveStatus("received database credentials of the first database") + + def _on_first_database_endpoints_changed(self, event: ResourceEndpointsChangedEvent) -> None: + """Event triggered when the read/write endpoints of the database change.""" + logger.info(f"first database endpoints have been changed to: {event.response.endpoints}") + + def _on_first_database_entity_created(self, event: ResourceEntityCreatedEvent) -> None: + """Event triggered when a database entity was created for this application.""" + # Retrieve the credentials using the charm library. + logger.info(f"first database entity credentials: {event.response.entity_name}") + self.unit.status = ActiveStatus("received entity credentials of the first database") + + # Second database events observers. + def _on_second_database_created(self, event: ResourceCreatedEvent) -> None: + """Event triggered when a database was created for this application.""" + # Retrieve the credentials using the charm library. + logger.info( + f"second database credentials: {event.response.username} {event.response.password}" + ) + self.unit.status = ActiveStatus("received database credentials of the second database") + + def _on_second_database_endpoints_changed(self, event: ResourceEndpointsChangedEvent) -> None: + """Event triggered when the read/write endpoints of the database change.""" + logger.info(f"second database endpoints have been changed to: {event.response.endpoints}") + + # Multiple database clusters events observers. + def _on_cluster_database_created(self, event: ResourceCreatedEvent) -> None: + """Event triggered when a database was created for this application.""" + # Retrieve the credentials using the charm library. 
+ logger.info( + f"cluster {event.relation.app.name} credentials: {event.response.username} {event.response.password}" + ) + self.unit.status = ActiveStatus( + f"received database credentials for cluster {event.relation.app.name}" + ) + + def _on_cluster_endpoints_changed(self, event: ResourceEndpointsChangedEvent) -> None: + """Event triggered when the read/write endpoints of the database change.""" + logger.info( + f"cluster {event.relation.app.name} endpoints have been changed to: {event.response.endpoints}" + ) + + # Multiple database clusters events observers (for aliased clusters/relations). + def _on_cluster1_database_created(self, event: ResourceCreatedEvent) -> None: + """Event triggered when a database was created for this application.""" + # Retrieve the credentials using the charm library. + logger.info(f"cluster1 credentials: {event.response.username} {event.response.password}") + self.unit.status = ActiveStatus("received database credentials for cluster1") + + def _on_cluster1_endpoints_changed(self, event: ResourceEndpointsChangedEvent) -> None: + """Event triggered when the read/write endpoints of the database change.""" + logger.info(f"cluster1 endpoints have been changed to: {event.response.endpoints}") + + def _on_cluster2_database_created(self, event: ResourceCreatedEvent) -> None: + """Event triggered when a database was created for this application.""" + # Retrieve the credentials using the charm library. 
+ logger.info(f"cluster2 credentials: {event.response.username} {event.response.password}") + self.unit.status = ActiveStatus("received database credentials for cluster2") + + def _on_cluster2_endpoints_changed(self, event: ResourceEndpointsChangedEvent) -> None: + """Event triggered when the read/write endpoints of the database change.""" + logger.info(f"cluster2 endpoints have been changed to: {event.response.endpoints}") + + # def _on_get_plugin_status(self, event: ActionEvent) -> None: + # """Returns the PostgreSQL plugin status (enabled/disabled).""" + # plugin = event.params.get("plugin") + # if not plugin: + # event.fail("Please provide a plugin name") + # return + + # plugin_status = ( + # "enabled" if self.first_database.is_postgresql_plugin_enabled(plugin) else "disabled" + # ) + # event.set_results({"plugin-status": plugin_status}) + + def _on_kafka_bootstrap_server_changed(self, event: ResourceEndpointsChangedEvent): + """Event triggered when a bootstrap server was changed for this application.""" + logger.info( + f"On kafka boostrap-server changed: bootstrap-server: {event.response.endpoints}" + ) + self.unit.status = ActiveStatus("kafka_bootstrap_server_changed") + + def _on_kafka_topic_created(self, _: ResourceCreatedEvent): + """Event triggered when a topic was created for this application.""" + logger.info("On kafka topic created") + self.unit.status = ActiveStatus("kafka_topic_created") + + def _on_kafka_entity_created(self, _: ResourceEntityCreatedEvent) -> None: + """Event triggered when a topic entity was created for this application.""" + logger.info("On kafka entity created") + self.unit.status = ActiveStatus("kafka_entity_created") + + def _on_connect_integration_created(self, _: ResourceCreatedEvent): + """Event triggered when Kafka Connect integration credentials are created for this application.""" + self.unit.status = ActiveStatus("connect_integration_created") + + def _on_connect_endpoints_changed(self, _: 
ResourceEndpointsChangedEvent): + """Event triggered when Kafka Connect REST endpoints change.""" + self.unit.status = ActiveStatus("connect_endpoints_changed") + + def _on_opensearch_index_created(self, _: ResourceCreatedEvent): + """Event triggered when an index was created for this application.""" + logger.info("On opensearch index created event fired") + self.unit.status = ActiveStatus("opensearch_index_created") + + def _on_opensearch_entity_created(self, _: ResourceEntityCreatedEvent): + """Event triggered when an index entity was created for this application.""" + logger.info("On opensearch entity created event fired") + self.unit.status = ActiveStatus("opensearch_entity_created") + + def _on_opensearch_authentication_updated(self, _: ResourceCreatedEvent): + """Event triggered when an index was created for this application.""" + logger.info("On opensearch authentication_updated event fired") + self.unit.status = ActiveStatus("opensearch_authentication_updated") + + def _on_reset_unit_status(self, _: ActionEvent): + """Handle the reset of status message for the unit.""" + self.unit.status = ActiveStatus() + + def _on_set_mtls_cert(self, event: ActionEvent): + """Sets the MTLS cert for the relation.""" + cmd = f'openssl req -new -newkey rsa:2048 -days 365 -nodes -subj "/CN={self.unit.name.replace("/", "-")}" -x509 -keyout client.key -out client.pem' + subprocess.check_output(cmd, shell=True, universal_newlines=True) + cert = open("./client.pem", "r").read() + relation = self.model.get_relation("kafka-split-pattern-client") + assert relation + model = self.kafka_split_pattern.interface.build_model( + relation.id, RequirerDataContractV1[KafkaRequestModel], component=self.app + ) + for response in model.requests: + response.mtls_cert = SecretStr(cert) + self.kafka_split_pattern.interface.write_model(relation.id, model) + event.set_results({"mtls-cert": cert}) + + +if __name__ == "__main__": + main(ApplicationCharm) diff --git 
a/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml b/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml new file mode 100644 index 00000000..b4317391 --- /dev/null +++ b/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml @@ -0,0 +1,84 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +type: charm +# Whenever "bases" is changed: +# - Update tests/integration/conftest.py::pytest_configure() +# - Update .github/workflow/ci.yaml integration-test matrix +platforms: + ubuntu@22.04:amd64: + ubuntu@24.04:amd64: + +# Files implicitly created by charmcraft without a part: +# - dispatch (https://github.com/canonical/charmcraft/pull/1898) +# - manifest.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259) +# Files implicitly copied/"staged" by charmcraft without a part: +# - actions.yaml, config.yaml, metadata.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293 +# https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157) +parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + # Use environment variable instead of `--break-system-packages` to avoid failing on older + # versions of pip that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + # poetry 2.0.0 requires Python >=3.9 + if ! 
"$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" diff --git a/tests/v1/integration/backward-compatibility-charm/metadata.yaml b/tests/v1/integration/backward-compatibility-charm/metadata.yaml new file mode 100644 index 00000000..898da718 --- /dev/null +++ b/tests/v1/integration/backward-compatibility-charm/metadata.yaml @@ -0,0 +1,12 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. +name: client +description: | + Data platform libs client charm. +summary: | + Data platform libs database meant to be used + only for testing of the libs in this repository. 
+ +requires: + backward-database: + interface: database_client diff --git a/tests/v1/integration/backward-compatibility-charm/poetry.lock b/tests/v1/integration/backward-compatibility-charm/poetry.lock new file mode 100644 index 00000000..a1670897 --- /dev/null +++ b/tests/v1/integration/backward-compatibility-charm/poetry.lock @@ -0,0 +1,341 @@ +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = 
"opentelemetry-api" +version = "1.36.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "opentelemetry_api-1.36.0-py3-none-any.whl", hash = "sha256:02f20bcacf666e1333b6b1f04e647dc1d5111f86b8e510238fcc56d7762cda8c"}, + {file = "opentelemetry_api-1.36.0.tar.gz", hash = "sha256:9a72572b9c416d004d492cbc6e61962c0501eaf945ece9b5a0f56597d8348aa0"}, +] + +[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "ops" +version = "2.23.1" +description = "The Python library behind great charms" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "ops-2.23.1-py3-none-any.whl", hash = "sha256:fdf58163beafd25180c12a4c7efaf1e76e5f8710508a97840c07055bb78b0c77"}, + {file = "ops-2.23.1.tar.gz", hash = "sha256:aecacd67ef7ca913f63f397e0330bfa93d70529a3ef71ed2d99e2bc232564ae3"}, +] + +[package.dependencies] +importlib-metadata = "*" +opentelemetry-api = ">=1.0,<2.0" +PyYAML = "==6.*" +websocket-client = "==1.*" + +[package.extras] +testing = ["ops-scenario (==7.23.1)"] +tracing = ["ops-tracing (==2.23.1)"] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" 
+description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + 
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file 
= "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = 
"websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.10.12" +content-hash = "3e90339ce16375daef4d9549ee45a773e9691caf1f8e2141decd3454017036e5" diff --git a/tests/v1/integration/backward-compatibility-charm/pyproject.toml b/tests/v1/integration/backward-compatibility-charm/pyproject.toml new file mode 100644 index 00000000..8ff16202 --- /dev/null +++ b/tests/v1/integration/backward-compatibility-charm/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +package-mode = false +requires-poetry = ">=2.0.0" + +[tool.poetry.dependencies] +python = "^3.10.12" +ops = ">=2.0.0,<3.0.0" +pydantic = ">=2.11" + +[tool.poetry.group.charm-libs.dependencies] +ops = ">=2.0.0" +pydantic = ">=2.11" + +[tool.poetry.requires-plugins] +poetry-plugin-export = ">=1.8" + +[build-system] +build-backend = 
"poetry.core.masonry.api" diff --git a/tests/v1/integration/backward-compatibility-charm/src/charm.py b/tests/v1/integration/backward-compatibility-charm/src/charm.py new file mode 100755 index 00000000..84b5fd52 --- /dev/null +++ b/tests/v1/integration/backward-compatibility-charm/src/charm.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Client charm that creates connection to database charm. + +This charm is meant to be used only for testing of the libraries in this repository. +This uses the v0 of data interfaces to ensure that the compatibility is not broken. +""" + +import logging + +from ops.charm import CharmBase +from ops.framework import StoredState +from ops.main import main +from ops.model import ActiveStatus + +from charms.data_platform_libs.v0.data_interfaces import DatabaseCreatedEvent, DatabaseRequires + +logger = logging.getLogger(__name__) + + +class ClientCharm(CharmBase): + """Database charm that accepts connections from application charms.""" + + _stored = StoredState() + + def __init__(self, *args): + super().__init__(*args) + + # Charm events defined in the database provides charm library. 
+        self.database = DatabaseRequires(self, "backward-database", "bwclient")
+        self.framework.observe(self.database.on.database_created, self._on_resource_created)
+
+    def _on_resource_created(self, event: DatabaseCreatedEvent) -> None:
+        """Event triggered when a new database is created."""
+        relation_id = event.relation.id
+        username = event.username
+        password = event.password
+        database = event.database
+
+        logger.error(
+            f"Database {database} created for relation {relation_id} with user {username} and password {password}"
+        )
+        self.unit.status = ActiveStatus("backward_database_created")
+
+
+if __name__ == "__main__":
+    main(ClientCharm)
diff --git a/tests/v1/integration/conftest.py b/tests/v1/integration/conftest.py
new file mode 100644
index 00000000..f4f584b8
--- /dev/null
+++ b/tests/v1/integration/conftest.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python3
+# Copyright 2022 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+import logging
+import os
+import shutil
+from datetime import datetime
+from pathlib import Path
+from subprocess import check_call, check_output
+
+import pytest
+from pytest_operator.plugin import OpsTest
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.fixture(scope="session")
+def dp_libs_ubuntu_series(pytestconfig) -> str:
+    if pytestconfig.option.os_series:
+        return pytestconfig.option.os_series
+
+
+@pytest.fixture(scope="module")
+def ops_test(ops_test: OpsTest, pytestconfig) -> OpsTest:
+    """Re-defining OpsTest.build_charm in a way that it takes CI caching and build parameters into account.
+
+    Build parameters (for charms available for multiple OS versions) are considered both when building the
+    charm, or when fetching pre-built, CI cached version of it.
+    """
+    _build_charm = ops_test.build_charm
+
+    # Add bases_index option (indicating which OS version to use)
+    # when building the charm within the scope of the test run
+    async def build_charm(charm_path, bases_index: int | None = None) -> Path:
+        if not bases_index and pytestconfig.option.build_bases_index is not None:
+            bases_index = pytestconfig.option.build_bases_index
+
+        logger.info(f"Building charm {charm_path} with base index {bases_index}")
+
+        return await _build_charm(charm_path, bases_index=bases_index)
+
+    ops_test.build_charm = build_charm
+    return ops_test
+
+
+@pytest.fixture(scope="module", autouse=True)
+def copy_data_interfaces_library_into_charm(ops_test: OpsTest):
+    """Copy the data_interfaces library to the different charm folders."""
+    library_path = "lib/charms/data_platform_libs/v1/data_interfaces.py"
+    install_path = "tests/v1/integration/database-charm/" + library_path
+    shutil.copyfile(library_path, install_path)
+    install_path = "tests/v1/integration/dummy-database-charm/" + library_path
+    shutil.copyfile(library_path, install_path)
+    install_path = "tests/v1/integration/kafka-charm/" + library_path
+    shutil.copyfile(library_path, install_path)
+    install_path = "tests/v1/integration/application-charm/" + library_path
+    shutil.copyfile(library_path, install_path)
+    install_path = "tests/v1/integration/opensearch-charm/" + library_path
+    shutil.copyfile(library_path, install_path)
+    install_path = "tests/v1/integration/kafka-connect-charm/" + library_path
+    shutil.copyfile(library_path, install_path)
+
+
+@pytest.fixture(scope="module", autouse=True)
+def copy_s3_library_into_charm(ops_test: OpsTest):
+    """Copy the s3 library to the applications charm folder."""
+    library_path = "lib/charms/data_platform_libs/v0/s3.py"
+    install_path_provider = "tests/v0/integration/s3-charm/" + library_path
+    install_path_requirer = "tests/v0/integration/application-s3-charm/" + library_path
+    shutil.copyfile(library_path, install_path_provider)
shutil.copyfile(library_path, install_path_requirer) + + +@pytest.fixture(scope="module") +async def application_charm(ops_test: OpsTest): + """Build the application charm.""" + charm_path = "tests/v1/integration/application-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def backward_compatibility_charm(ops_test: OpsTest): + """Build a v0 charm to integrate with a v1 client.""" + charm_path = "tests/v1/integration/backward-compatibility-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def database_charm(ops_test: OpsTest): + """Build the database charm.""" + charm_path = "tests/v1/integration/database-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def dummy_database_charm(ops_test: OpsTest): + """Build the database charm.""" + charm_path = "tests/v1/integration/dummy-database-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def application_s3_charm(ops_test: OpsTest): + """Build the application-s3 charm.""" + charm_path = "tests/v1/integration/application-s3-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def s3_charm(ops_test: OpsTest): + """Build the S3 charm.""" + charm_path = "tests/v1/integration/s3-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def kafka_charm(ops_test: OpsTest): + """Build the Kafka charm.""" + charm_path = "tests/v1/integration/kafka-charm" + charm = await ops_test.build_charm(charm_path) + return charm + + +@pytest.fixture(scope="module") +async def kafka_connect_charm(ops_test: OpsTest): + """Build the Kafka Connect dummy charm.""" + charm_path = "tests/v1/integration/kafka-connect-charm" + charm = await ops_test.build_charm(charm_path) + 
+    return charm
+
+
+@pytest.fixture(scope="module")
+async def opensearch_charm(ops_test: OpsTest):
+    """Build the OpenSearch charm.
+
+    TODO we could simplify a lot of these charm builds by having a single test charm that includes
+    all these relations. This might be easily achieved by merging this repo with the
+    data-integrator charm repo.
+    """
+    charm_path = "tests/v1/integration/opensearch-charm"
+    charm = await ops_test.build_charm(charm_path)
+    return charm
+
+
+@pytest.fixture(autouse=True)
+async def without_errors(ops_test: OpsTest, request):
+    """Fail the test if juju debug-log recorded non-whitelisted ERROR lines.
+
+    Tests may whitelist expected errors with the `log_errors_allowed` marker:
+    each marker argument is a substring of an allowed ERROR line, and a bare
+    marker (no arguments) allows all errors for that test.
+    """
+    # Timestamp taken before the test body runs; only log lines after it count.
+    now = datetime.now().strftime("%H:%M:%S.%f")[:-3]
+    yield
+    whitelist = []
+    if "log_errors_allowed" in request.keywords:
+        for marker in [
+            mark for mark in request.node.iter_markers() if mark.name == "log_errors_allowed"
+        ]:
+            for arg in marker.args:
+                whitelist.append(arg)
+
+        # A bare marker (no args) means all errors are allowed for this test.
+        # NOTE(review): this early return must apply only when the marker is
+        # present; previously it was also taken for unmarked tests (empty
+        # whitelist), which silently disabled the autouse error check.
+        if not whitelist:
+            return
+
+    _, dbg_log, _ = await ops_test.juju("debug-log", "--ms", "--replay")
+    lines = dbg_log.split("\n")
+    for line in lines:
+        logitems = line.split(" ")
+        if not line or len(logitems) < 3:
+            continue
+        # Skip log lines emitted before this test started.
+        if logitems[1] < now:
+            continue
+        if logitems[2] == "ERROR":
+            assert any(white in line for white in whitelist)
+
+
+@pytest.fixture(scope="session")
+def fetch_old_versions():
+    """Fetch up to 7 previous LIBPATCH versions (from the last 25 commits) for upgrade tests."""
+    cwd = os.getcwd()
+    src_path = "lib/charms/data_platform_libs/v1/data_interfaces.py"
+    data_path = f"{cwd}/tests/v1/integration/data/data_interfaces.py"
+    tmp_path = "./tmp_repo_checkout"
+
+    os.mkdir(tmp_path)
+    os.chdir(tmp_path)
+    check_call("git clone https://github.com/canonical/data-platform-libs.git", shell=True)
+    os.chdir("data-platform-libs")
+    last_commits = check_output(
+        "git show --pretty=format:'%h' --no-patch -25", shell=True, universal_newlines=True
+    ).split()
+
+    versions = []
+    for commit 
in last_commits: + check_call(f"git checkout {commit}", shell=True) + version = check_output( + "grep ^LIBPATCH lib/charms/data_platform_libs/v1/data_interfaces.py | cut -d ' ' -f 3", + shell=True, + universal_newlines=True, + ) + version = version.strip() + if version not in versions: + shutil.copyfile(src_path, f"{data_path}.v{version}") + versions.append(version) + + if len(versions) == 7: + break + + os.chdir(cwd) + shutil.rmtree(tmp_path) diff --git a/tests/v1/integration/database-charm/actions.yaml b/tests/v1/integration/database-charm/actions.yaml new file mode 100644 index 00000000..710d8a68 --- /dev/null +++ b/tests/v1/integration/database-charm/actions.yaml @@ -0,0 +1,140 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. +change-admin-password: + description: Change the admin password to a new, generated value + +set-secret: + description: Change the value of a particular secret + params: + relation_id: + type: integer + description: The relation's unique ID + field: + type: string + description: The mapping name, i.e. 
what databag field ('secret-') is to be changed
+
+get-relation-field:
+  description: Get fields from the relation
+  params:
+    relation_id:
+      type: integer
+      description: The relation's unique ID
+    field:
+      type: string
+      description: Relation field
+
+get-relation-self-side-field:
+  description: Get fields from the relation
+  params:
+    relation_id:
+      type: integer
+      description: The relation's unique ID
+    field:
+      type: string
+      description: Relation field
+
+set-relation-field:
+  description: Set fields from the relation
+  params:
+    relation_id:
+      type: integer
+      description: The relation's unique ID
+    field:
+      type: string
+      description: Relation field
+    value:
+      type: string
+      description: Value of the field to set
+
+delete-relation-field:
+  description: Delete fields from the relation
+  params:
+    relation_id:
+      type: integer
+      description: The relation's unique ID
+    field:
+      type: string
+      description: Relation field
+
+get-peer-relation-field:
+  description: Get fields from the second-database relation
+  params:
+    component:
+      type: string
+      description: app/unit
+    field:
+      type: string
+      description: Relation field
+
+set-peer-relation-field:
+  description: Set fields from the second-database relation
+  params:
+    component:
+      type: string
+      description: app/unit
+    field:
+      type: string
+      description: Relation field
+    value:
+      type: string
+      description: Value of the field to set
+
+set-peer-relation-field-multiple:
+  description: Set fields from the second-database relation multiple times
+  params:
+    component:
+      type: string
+      description: app/unit
+    field:
+      type: string
+      description: Relation field
+    value:
+      type: string
+      description: Value of the field to set
+    count:
+      type: integer
+      description: Number of iterations
+      default: 3
+
+set-peer-secret:
+  description: Set fields from the second-database relation
+  params:
+    component:
+      type: string
+      description: app/unit
+    field:
+      type: string
+      description: Relation field
+    value:
+      type: string
description: Value of the field to set
+    group:
+      type: string
+      default: ''
+
+delete-peer-relation-field:
+  description: Delete fields from the second-database relation
+  params:
+    component:
+      type: string
+      description: app/unit
+    field:
+      type: string
+      description: Relation field
+
+delete-peer-secret:
+  description: Delete Peer secret
+  params:
+    component:
+      type: string
+      description: app/unit
+    group:
+      type: string
+      default: ''
+
+get-other-peer-relation-field:
+  description: Get fields from the second-database relation
+  params:
+    field:
+      type: string
+      description: Relation field
diff --git a/tests/v1/integration/database-charm/charmcraft.yaml b/tests/v1/integration/database-charm/charmcraft.yaml
new file mode 100644
index 00000000..a07cac5e
--- /dev/null
+++ b/tests/v1/integration/database-charm/charmcraft.yaml
@@ -0,0 +1,95 @@
+# Copyright 2022 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+type: charm
+# Whenever "bases" is changed:
+# - Update tests/integration/conftest.py::pytest_configure()
+# - Update .github/workflow/ci.yaml integration-test matrix
+platforms:
+  ubuntu@22.04:amd64:
+  ubuntu@24.04:amd64:
+
+# Files implicitly created by charmcraft without a part:
+# - dispatch (https://github.com/canonical/charmcraft/pull/1898)
+# - manifest.yaml
+#   (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259)
+# Files implicitly copied/"staged" by charmcraft without a part:
+# - actions.yaml, config.yaml, metadata.yaml
+#   (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293
+#   https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157)
+parts:
+  poetry-deps:
+    plugin: nil
+    build-packages:
+      - curl
+    override-build: |
+      # Use environment variable instead of `--break-system-packages` to avoid failing on older
+      # versions of pip 
that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + # poetry 2.0.0 requires Python >=3.9 + if ! "$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + - libpq-dev + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. 
Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" + libpq: + build-packages: + - libpq-dev + plugin: dump + source: /usr/lib/ + source-type: local + stage: + - lib/ + organize: + "*-linux-gnu/libpq.so*": lib/ diff --git a/tests/v1/integration/database-charm/lib/charms/data_platform_libs/v1/.gitkeep b/tests/v1/integration/database-charm/lib/charms/data_platform_libs/v1/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/database-charm/metadata.yaml b/tests/v1/integration/database-charm/metadata.yaml new file mode 100644 index 00000000..940cbc04 --- /dev/null +++ b/tests/v1/integration/database-charm/metadata.yaml @@ -0,0 +1,34 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. +name: database +description: | + Data platform libs database charm used in integration tests. +summary: | + Data platform libs database meant to be used + only for testing of the libs in this repository. 
+ +containers: + database: + resource: database-image + mounts: + - storage: database + location: /var/lib/postgresql/data/pgdata + +resources: + database-image: + type: oci-image + description: OCI image for database + upstream-source: ubuntu/postgres@sha256:f0b7dcc3088c018ebcd90dd8b4e9007b094fd180d5a12f5be3e7120914ac159d + +peers: + database-peers: + interface: database-peers + +provides: + database: + interface: database_client + +storage: + database: + type: filesystem + location: /var/lib/postgresql/data/pgdata diff --git a/tests/v1/integration/database-charm/poetry.lock b/tests/v1/integration/database-charm/poetry.lock new file mode 100644 index 00000000..073f1f1d --- /dev/null +++ b/tests/v1/integration/database-charm/poetry.lock @@ -0,0 +1,361 @@ +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = 
["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "opentelemetry-api" +version = "1.36.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "opentelemetry_api-1.36.0-py3-none-any.whl", hash = "sha256:02f20bcacf666e1333b6b1f04e647dc1d5111f86b8e510238fcc56d7762cda8c"}, + {file = "opentelemetry_api-1.36.0.tar.gz", hash = "sha256:9a72572b9c416d004d492cbc6e61962c0501eaf945ece9b5a0f56597d8348aa0"}, +] + +[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "ops" +version = "2.23.1" +description = "The Python library behind great charms" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "ops-2.23.1-py3-none-any.whl", hash = "sha256:fdf58163beafd25180c12a4c7efaf1e76e5f8710508a97840c07055bb78b0c77"}, + {file = "ops-2.23.1.tar.gz", hash = "sha256:aecacd67ef7ca913f63f397e0330bfa93d70529a3ef71ed2d99e2bc232564ae3"}, +] + +[package.dependencies] +importlib-metadata = "*" +opentelemetry-api = ">=1.0,<2.0" +PyYAML = "==6.*" +websocket-client = "==1.*" + +[package.extras] +testing = ["ops-scenario (==7.23.1)"] +tracing = ["ops-tracing (==2.23.1)"] + +[[package]] +name = "psycopg2" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", 
hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for 
Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + 
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file 
= "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = 
"websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.10.12" +content-hash = "33b42b6706f332be97f99c3f84eda369f2a2ef185c09d5fd794a3daea17b55f3" diff --git a/tests/v1/integration/database-charm/pyproject.toml b/tests/v1/integration/database-charm/pyproject.toml new file mode 100644 index 00000000..eb122466 --- /dev/null +++ b/tests/v1/integration/database-charm/pyproject.toml @@ -0,0 +1,20 @@ +[tool.poetry] +package-mode = false +requires-poetry = ">=2.0.0" + +[tool.poetry.dependencies] +python = "^3.10.12" +ops = ">=2.0.0,<3.0.0" +pydantic = ">=2.11" +psycopg2 = "^2.9.10" + +[tool.poetry.group.charm-libs.dependencies] +ops = ">=2.0.0" +pydantic = ">=2.11" +psycopg2 = "^2.9.10" + +[tool.poetry.requires-plugins] +poetry-plugin-export = ">=1.8" + +[build-system] +build-backend = 
"poetry.core.masonry.api" diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py new file mode 100755 index 00000000..15965a46 --- /dev/null +++ b/tests/v1/integration/database-charm/src/charm.py @@ -0,0 +1,465 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Database charm that accepts connections from application charms. + +This charm is meant to be used only for testing +of the libraries in this repository. +""" + +import logging +import secrets +import string +from random import randrange +from time import sleep +from typing import Annotated, Optional + +import psycopg2 +from ops import Relation, Unit +from ops.charm import ActionEvent, CharmBase, WorkloadEvent +from ops.framework import StoredState +from ops.main import main +from ops.model import ActiveStatus, MaintenanceStatus +from pydantic import Field, SecretStr, TypeAdapter + +from charms.data_platform_libs.v1.data_interfaces import ( + DataContractV1, + ExtraSecretStr, + OpsOtherPeerUnitRepositoryInterface, + OpsPeerRepositoryInterface, + OpsPeerUnitRepositoryInterface, + OptionalSecretStr, + PeerModel, + RequirerCommonModel, + ResourceEntityRequestedEvent, + ResourceProviderEventHandler, + ResourceProviderModel, + ResourceRequestedEvent, + SecretBool, +) + +logger = logging.getLogger(__name__) + +SECRET_INTERNAL_LABEL = "internal-secret" +PEER = "database-peers" +APP_SECRETS = ["monitor-password", "secret-field"] +UNIT_SECRETS = ["monitor-password", "secret-field", "my-unit-secret"] + +MygroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] + + +class PeerAppModel(PeerModel): + monitor_password: ExtraSecretStr + secret_field: ExtraSecretStr + mygroup_field1: MygroupSecretStr = Field(default=None) + mygroup_field2: MygroupSecretStr = Field(default=None) + + +class PeerUnitModel(PeerModel): + monitor_password: ExtraSecretStr + secret_field: 
ExtraSecretStr + my_unit_secret: ExtraSecretStr + + +DataContract = TypeAdapter(DataContractV1[ResourceProviderModel]) + + +class DatabaseCharm(CharmBase): + """Database charm that accepts connections from application charms.""" + + _stored = StoredState() + + def __init__(self, *args): + super().__init__(*args) + self._servers_data = {} + + self._peer_relation_app = OpsPeerRepositoryInterface( + self, relation_name=PEER, model=PeerAppModel + ) + self._peer_relation_unit = OpsPeerUnitRepositoryInterface( + self, relation_name=PEER, model=PeerUnitModel + ) + + # Default charm events. + self.framework.observe(self.on.database_pebble_ready, self._on_database_pebble_ready) + + # Charm events defined in the database provides charm library. + # self.database = DatabaseProvides(self, relation_name="database") + self.database = ResourceProviderEventHandler(self, "database", RequirerCommonModel) + self.framework.observe(self.database.on.resource_requested, self._on_resource_requested) + self.framework.observe( + self.database.on.resource_entity_requested, self._on_resource_entity_requested + ) + + # Stored state is used to track the password of the database superuser. 
+ self._stored.set_default(password=self._new_password()) + self.framework.observe( + self.on.change_admin_password_action, self._on_change_admin_password + ) + + self.framework.observe(self.on.set_secret_action, self._on_set_secret_action) + + # Get/set/delete values on second-database relaton + self.framework.observe( + self.on.get_relation_self_side_field_action, self._on_get_relation_self_side_field + ) + self.framework.observe(self.on.get_relation_field_action, self._on_get_relation_field) + self.framework.observe(self.on.set_relation_field_action, self._on_set_relation_field) + self.framework.observe( + self.on.delete_relation_field_action, self._on_delete_relation_field + ) + + # Get/set/delete values on second-database relaton + self.framework.observe( + self.on.get_peer_relation_field_action, self._on_get_peer_relation_field + ) + self.framework.observe( + self.on.set_peer_relation_field_action, self._on_set_peer_relation_field + ) + self.framework.observe( + self.on.set_peer_relation_field_multiple_action, + self._on_set_peer_relation_field_multiple, + ) + self.framework.observe(self.on.set_peer_secret_action, self._on_set_peer_secret) + self.framework.observe( + self.on.delete_peer_relation_field_action, self._on_delete_peer_relation_field + ) + self.framework.observe(self.on.delete_peer_secret_action, self._on_delete_peer_secret) + + self.framework.observe( + self.on.get_other_peer_relation_field_action, self._on_get_other_peer_relation_field + ) + + @property + def peer_relation(self) -> Optional[Relation]: + """The cluster peer relation.""" + return self.model.get_relation(PEER) + + @property + def peer_units_data_interfaces(self) -> dict[Unit, OpsOtherPeerUnitRepositoryInterface]: + """The cluster peer relation.""" + if not self.peer_relation or not self.peer_relation.units: + return {} + + for unit in self.peer_relation.units: + if unit not in self._servers_data: + self._servers_data[unit] = OpsOtherPeerUnitRepositoryInterface( + charm=self, + 
relation_name=PEER, + unit=unit, + ) + return self._servers_data + + def _on_change_admin_password(self, event: ActionEvent): + """Change the admin password.""" + password = self._new_password() + for relation in self.database.interface.relations: + model = self.database.interface.build_model(relation.id, DataContract) + for request in model.requests: + request.password = password # pyright: ignore[reportAttributeAccessIssue] + self.database.interface.write_model(relation.id, model) + + def _on_set_secret_action(self, event: ActionEvent): + """Change the admin password.""" + secret_field: str | None = event.params.get("field") + if not secret_field: + event.fail("Invalid empty field.") + return + password = event.params.get("value", self._new_password()) + for relation in self.database.interface.relations: + model = self.database.interface.build_model(relation.id, DataContract) + for request in model.requests: + setattr(request, secret_field, password) + + def _on_database_pebble_ready(self, event: WorkloadEvent) -> None: + """Define and start the database using the Pebble API.""" + container = event.workload + pebble_layer = { + "summary": "database layer", + "description": "pebble config layer for database", + "services": { + "database": { + "override": "replace", + "summary": "database", + "command": "/usr/local/bin/docker-entrypoint.sh postgres", + "startup": "enabled", + "environment": { + "PGDATA": "/var/lib/postgresql/data/pgdata", + "POSTGRES_PASSWORD": self._stored.password, + }, + } + }, + } + container.add_layer("database", pebble_layer, combine=True) + container.autostart() + self.unit.status = ActiveStatus() + + def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: + """Event triggered when a new database is requested.""" + self.unit.status = MaintenanceStatus("creating database") + + relation_id = event.relation.id + + request = event.request + + resource = request.resource + extra_user_roles = request.extra_user_roles + + username 
= f"relation_{relation_id}_{request.request_id}" + password = self._new_password() + connection_string = ( + "dbname='postgres' user='postgres' host='localhost' " + f"password='{self._stored.password}' connect_timeout=10" + ) + connection = psycopg2.connect(connection_string) + connection.autocommit = True + cursor = connection.cursor() + # Create the database, user and password. Also gives the user access to the database. + cursor.execute(f"CREATE DATABASE {resource};") + cursor.execute(f"CREATE USER {username} WITH ENCRYPTED PASSWORD '{password}';") + cursor.execute(f"GRANT ALL PRIVILEGES ON DATABASE {resource} TO {username};") + # Add the roles to the user. + if extra_user_roles: + cursor.execute(f"ALTER USER {username} {' '.join(extra_user_roles)};") + # Get the database version. + cursor.execute("SELECT version();") + version = cursor.fetchone()[0] + cursor.close() + connection.close() + + # Temporary hack to avoid https://bugs.launchpad.net/juju/+bug/2031631 + sleep(randrange(5)) + + assert self.model.get_binding("database") + assert self.model.get_binding("database").network + assert self.model.get_binding("database").network.bind_address + logger.info( + ( + f"Charm binding {self.model.get_binding('database')}, " + f"network: {self.model.get_binding('database').network}, " + f"IP: {self.model.get_binding('database').network.bind_address}" + ) + ) + + response = ResourceProviderModel( + request_id=request.request_id, + resource=resource, + password=SecretStr(password), + username=SecretStr(username), + endpoints=f"{self.model.get_binding('database').network.bind_address}:5432", + tls=SecretBool(False), + version=version, + ) + self.database.set_response(event.relation.id, response) + self.unit.status = ActiveStatus() + + def _on_resource_entity_requested(self, event: ResourceEntityRequestedEvent) -> None: + """Event triggered when a new database entity is requested.""" + self.unit.status = MaintenanceStatus("creating entity") + + request = event.request + + 
# Retrieve the entity-type using the charm library. + entity_type = request.entity_type + + # Generate a entity-name and a entity-password for the application. + rolename = self._new_rolename() + password = self._new_password() + + # Connect to the database. + connection_string = ( + "dbname='postgres' user='postgres' host='localhost' " + f"password='{self._stored.password}' connect_timeout=10" + ) + connection = psycopg2.connect(connection_string) + connection.autocommit = True + cursor = connection.cursor() + + # Create the role + if entity_type == "user": + extra_roles = request.extra_user_roles + cursor.execute(f"CREATE ROLE {rolename} WITH ENCRYPTED PASSWORD '{password}';") + cursor.execute(f"ALTER ROLE {rolename} {' '.join(extra_roles)};") + if entity_type == "group": + extra_roles = request.extra_group_roles + cursor.execute(f"CREATE ROLE {rolename};") + cursor.execute(f"ALTER ROLE {rolename} {' '.join(extra_roles)};") + + # Share the credentials with the application. + response = ResourceProviderModel( + request_id=request.request_id, + salt=request.salt, + entity_name=SecretStr(rolename), + entity_password=SecretStr(password), + ) + self.database.set_response(event.relation.id, response) + self.unit.status = ActiveStatus() + + def _get_relation(self, relation_id: int) -> Relation: + for relation in self.database.interface.relations: + if relation.id == relation_id: + return relation + raise ValueError(f"No relation {relation_id}") + + # Get/set/delete field on second-database relation + def _on_get_relation_field(self, event: ActionEvent): + """[second_database]: Get requested relation field.""" + relation = self._get_relation(event.params["relation_id"]) + value = None + repository = self.database.interface.repository(relation.id, component=relation.app) + + repository.get_field(event.params["field"]) + event.set_results({"value": value if value else ""}) + + def _on_get_relation_self_side_field(self, event: ActionEvent): + """[second_database]: Get 
requested relation field.""" + relation = self._get_relation(event.params["relation_id"]) + value = None + repository = self.database.interface.repository(relation.id) + + repository.get_field(event.params["field"]) + event.set_results({"value": value if value else ""}) + + def _on_set_relation_field(self, event: ActionEvent): + """Set requested relation field.""" + relation = self._get_relation(event.params["relation_id"]) + repository = self.database.interface.repository(relation.id) + repository.write_field(event.params["field"], event.params["value"]) + + def _on_delete_relation_field(self, event: ActionEvent): + """Delete requested relation field.""" + relation = self._get_relation(event.params["relation_id"]) + repository = self.database.interface.repository(relation.id) + repository.delete_field(event.params["field"]) + # Charms should be compatible with old vesrions, to simulatrams["field"]) + + def _new_rolename(self) -> str: + """Generate a random rolename string. + + Returns: + A random rolename string. + """ + choices = string.ascii_letters + return "".join([secrets.choice(choices) for _ in range(8)]) + + def _new_password(self) -> str: + """Generate a random password string. + + Returns: + A random password string. 
+ """ + choices = string.ascii_letters + string.digits + return "".join([secrets.choice(choices) for _ in range(16)]) + + # Get/set/delete field on the peer relation + def _on_get_peer_relation_field(self, event: ActionEvent): + """[second_database]: Set requested relation field.""" + component = event.params["component"] + + value = None + + if component == "app": + relation = self._peer_relation_app.relations[0] + model = self._peer_relation_app.build_model(relation.id) + value = getattr(model, event.params["field"]) + else: + relation = self._peer_relation_unit.relations[0] + model = self._peer_relation_unit.build_model(relation.id) + value = getattr(model, event.params["field"]) + event.set_results({"value": value if value else ""}) + + def _on_set_peer_relation_field(self, event: ActionEvent): + """Set requested relation field.""" + component = event.params["component"] + if component == "app": + relation = self._peer_relation_app.relations[0] + model = self._peer_relation_app.build_model(relation.id) + setattr(model, event.params["field"], event.params["value"]) + self._peer_relation_app.write_model(relation.id, model) + else: + relation = self._peer_relation_unit.relations[0] + model = self._peer_relation_unit.build_model(relation.id) + setattr(model, event.params["field"], event.params["value"]) + self._peer_relation_unit.write_model(relation.id, model) + + def _on_set_peer_relation_field_multiple(self, event: ActionEvent): + """Set requested relation field.""" + component = event.params["component"] + count = event.params["count"] + + # Charms should be compatible with old vesrions, to simulate rolling upgrade + for cnt in range(count): + value = event.params["value"] + f"{cnt}" + if component == "app": + relation = self._peer_relation_app.relations[0] + repository = self._peer_relation_app.repository(relation.id) + repository.write_field(event.params["field"], value) + else: + relation = self._peer_relation_unit.relations[0] + repository = 
self._peer_relation_unit.repository(relation.id) + repository.write_field(event.params["field"], value) + + def _on_set_peer_secret(self, event: ActionEvent): + """Set requested relation field.""" + component = event.params["component"] + if component == "app": + relation = self._peer_relation_app.relations[0] + repository = self._peer_relation_app.repository(relation.id) + repository.write_secret_field( + event.params["field"], event.params["value"], secret_group=event.params["group"] + ) + else: + relation = self._peer_relation_unit.relations[0] + repository = self._peer_relation_unit.repository(relation.id) + repository.write_secret_field( + event.params["field"], event.params["value"], secret_group=event.params["group"] + ) + + def _on_delete_peer_relation_field(self, event: ActionEvent): + """Delete requested relation field.""" + component = event.params["component"] + + if component == "app": + relation = self._peer_relation_app.relations[0] + model = self._peer_relation_app.build_model(relation.id) + setattr(model, event.params["field"], None) + self._peer_relation_app.write_model(relation.id, model) + else: + relation = self._peer_relation_unit.relations[0] + model = self._peer_relation_unit.build_model(relation.id) + setattr(model, event.params["field"], None) + self._peer_relation_unit.write_model(relation.id, model) + + # Other Peer Data + def _on_get_other_peer_relation_field(self, event: ActionEvent): + """[second_database]: Get requested relation field.""" + value = {} + relation = self.model.get_relation(PEER) + if not relation: + event.fail("Missing relation") + return + for unit, interface in self.peer_units_data_interfaces.items(): + value[unit.name.replace("/", "-")] = interface.repository(relation.id).get_field( + event.params["field"] + ) + event.set_results(value) + + # Remove peer secrets + def _on_delete_peer_secret(self, event: ActionEvent): + """Delete requested relation field.""" + component = event.params["component"] + + secret = None + 
group_str = "" if not event.params["group"] else f".{event.params['group']}" + if component == "app": + secret = self.model.get_secret(label=f"{PEER}.database.app{group_str}") + else: + secret = self.model.get_secret(label=f"{PEER}.database.unit{group_str}") + + if secret: + secret.remove_all_revisions() + + +if __name__ == "__main__": + main(DatabaseCharm) diff --git a/tests/v1/integration/dummy-database-charm/actions.yaml b/tests/v1/integration/dummy-database-charm/actions.yaml new file mode 100644 index 00000000..984b133f --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/actions.yaml @@ -0,0 +1,126 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +change-admin-password: + description: Change the admin password to a new, generated value + +set-secret: + description: Change the value of a particular secret + params: + relation_id: + type: integer + description: The relation's unique ID + field: + type: string + description: The mapping name, i.e. 
what databag field ('secret-') is to be changed + +get-relation-field: + description: Get fields from the relation + params: + relation_id: + type: integer + description: The relation's unique ID + field: + type: string + description: Relation field + +set-relation-field: + description: Set fields from the relation + params: + relation_id: + type: integer + description: The relation's unique ID + field: + type: string + description: Relation field + value: + type: string + description: Value of the field to set + +delete-relation-field: + description: Delete fields from the relation + params: + relation_id: + type: integer + description: The relation's unique ID + field: + type: string + description: Relation field + +get-peer-relation-field: + description: Get fields from the database relation + params: + component: + type: string + description: app/unit + field: + type: string + description: Relation field + +set-peer-relation-field: + description: Set fields from the database relation + params: + component: + type: string + description: app/unit + field: + type: string + description: Relation field + value: + type: string + description: Value of the field to set + +set-peer-secret: + description: Set peer relation secrets + params: + component: + type: string + description: app/unit + field: + type: string + description: Relation field + value: + type: string + description: Value of the field to set + group: + type: string + default: '' + +get-peer-secret: + description: Get peer relation secrets + params: + component: + type: string + description: app/unit + field: + type: string + description: Relation field + group: + type: string + default: '' + +delete-peer-relation-field: + description: Delete fields from the sedond-database relation + params: + component: + type: string + description: app/unit + field: + type: string + description: Relation field + +delete-peer-secret: + description: Delete Peer secret + params: + component: + type: string + description: 
app/unit + group: + type: string + default: '' + +get-other-peer-relation-field: + description: Get fields from the second-database relation + params: + field: + type: string + description: Relation field diff --git a/tests/v1/integration/dummy-database-charm/charmcraft.yaml b/tests/v1/integration/dummy-database-charm/charmcraft.yaml new file mode 100644 index 00000000..a07cac5e --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/charmcraft.yaml @@ -0,0 +1,95 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +type: charm +# Whenever "bases" is changed: +# - Update tests/integration/conftest.py::pytest_configure() +# - Update .github/workflow/ci.yaml integration-test matrix +platforms: + ubuntu@22.04:amd64: + ubuntu@24.04:amd64: + +# Files implicitly created by charmcraft without a part: +# - dispatch (https://github.com/canonical/charmcraft/pull/1898) +# - manifest.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259) +# Files implicitly copied/"staged" by charmcraft without a part: +# - actions.yaml, config.yaml, metadata.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293 +# https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157) +parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + # Use environment variable instead of `--break-system-packages` to avoid failing on older + # versions of pip that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' 
--tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + # poetry 2.0.0 requires Python >=3.9 + if ! "$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + - libpq-dev + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. 
Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" + libpq: + build-packages: + - libpq-dev + plugin: dump + source: /usr/lib/ + source-type: local + stage: + - lib/ + organize: + "*-linux-gnu/libpq.so*": lib/ diff --git a/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/.gitkeep b/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/dummy-database-charm/metadata.yaml b/tests/v1/integration/dummy-database-charm/metadata.yaml new file mode 100644 index 00000000..91f998fa --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/metadata.yaml @@ -0,0 +1,34 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. +name: dummy-database +description: | + Data platform libs database charm used in integration tests. +summary: | + Data platform libs database meant to be used + only for testing of the libs in this repository. 
+ +containers: + database: + resource: database-image + mounts: + - storage: database + location: /var/lib/postgresql/data/pgdata + +resources: + database-image: + type: oci-image + description: OCI image for database + upstream-source: ubuntu/postgres@sha256:f0b7dcc3088c018ebcd90dd8b4e9007b094fd180d5a12f5be3e7120914ac159d + +peers: + database-peers: + interface: database-peers + +provides: + database: + interface: database_client + +storage: + database: + type: filesystem + location: /var/lib/postgresql/data/pgdata diff --git a/tests/v1/integration/dummy-database-charm/poetry.lock b/tests/v1/integration/dummy-database-charm/poetry.lock new file mode 100644 index 00000000..ff8cc84e --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/poetry.lock @@ -0,0 +1,361 @@ +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = 
["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "opentelemetry-api" +version = "1.36.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "opentelemetry_api-1.36.0-py3-none-any.whl", hash = "sha256:02f20bcacf666e1333b6b1f04e647dc1d5111f86b8e510238fcc56d7762cda8c"}, + {file = "opentelemetry_api-1.36.0.tar.gz", hash = "sha256:9a72572b9c416d004d492cbc6e61962c0501eaf945ece9b5a0f56597d8348aa0"}, +] + +[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "ops" +version = "2.23.1" +description = "The Python library behind great charms" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "ops-2.23.1-py3-none-any.whl", hash = "sha256:fdf58163beafd25180c12a4c7efaf1e76e5f8710508a97840c07055bb78b0c77"}, + {file = "ops-2.23.1.tar.gz", hash = "sha256:aecacd67ef7ca913f63f397e0330bfa93d70529a3ef71ed2d99e2bc232564ae3"}, +] + +[package.dependencies] +importlib-metadata = "*" +opentelemetry-api = ">=1.0,<2.0" +PyYAML = "==6.*" +websocket-client = "==1.*" + +[package.extras] +testing = ["ops-scenario (==7.23.1)"] +tracing = ["ops-tracing (==2.23.1)"] + +[[package]] +name = "psycopg2" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = 
"psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" 
+description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + 
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file 
= "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = 
"websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main", "charm-libs"] +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.10.12" +content-hash = "33b42b6706f332be97f99c3f84eda369f2a2ef185c09d5fd794a3daea17b55f3" diff --git a/tests/v1/integration/dummy-database-charm/pyproject.toml b/tests/v1/integration/dummy-database-charm/pyproject.toml new file mode 100644 index 00000000..eb122466 --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/pyproject.toml @@ -0,0 +1,20 @@ +[tool.poetry] +package-mode = false +requires-poetry = ">=2.0.0" + +[tool.poetry.dependencies] +python = "^3.10.12" +ops = ">=2.0.0,<3.0.0" +pydantic = ">=2.11" +psycopg2 = "^2.9.10" + +[tool.poetry.group.charm-libs.dependencies] +ops = ">=2.0.0" +pydantic = ">=2.11" +psycopg2 = "^2.9.10" + +[tool.poetry.requires-plugins] +poetry-plugin-export = ">=1.8" + +[build-system] 
+build-backend = "poetry.core.masonry.api" diff --git a/tests/v1/integration/dummy-database-charm/src/charm.py b/tests/v1/integration/dummy-database-charm/src/charm.py new file mode 100755 index 00000000..c10c0b03 --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/src/charm.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python3 +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Database charm that accepts connections from application charms. + +This charm is meant to be used only for testing +of the libraries in this repository. +""" + +import logging +import secrets +import string +from typing import Annotated, Optional + +from ops import Relation +from ops.charm import ActionEvent, CharmBase +from ops.main import main +from ops.model import ActiveStatus +from pydantic import Field + +from charms.data_platform_libs.v1.data_interfaces import ( + DataContractV1, + OpsOtherPeerUnitRepository, + OpsPeerRepositoryInterface, + OpsPeerUnitRepositoryInterface, + OptionalSecretStr, + PeerModel, + RequirerCommonModel, + ResourceProviderEventHandler, + ResourceProviderModel, + SecretGroup, +) + +logger = logging.getLogger(__name__) + +PEER = "database-peers" + + +MygroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] + + +class PeerAppModel(PeerModel): + field: MygroupSecretStr = Field(default=None) + + +class ExtendedResourceProviderModel(ResourceProviderModel): + field: MygroupSecretStr = Field(default=None) + + +ExtendedDataContractV1 = DataContractV1[ExtendedResourceProviderModel] + + +class DatabaseCharm(CharmBase): + """Database charm that accepts connections from application charms.""" + + def __init__(self, *args): + super().__init__(*args) + self._servers_data = {} + + self.peer_relation_app = OpsPeerRepositoryInterface( + self, relation_name=PEER, model=PeerAppModel + ) + + self.peer_relation_unit = OpsPeerUnitRepositoryInterface( + self, relation_name=PEER, model=PeerAppModel + ) + + 
self.database = ResourceProviderEventHandler(self, "database", RequirerCommonModel) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Stored state is used to track the password of the database superuser. + self.framework.observe( + self.on.change_admin_password_action, self._on_change_admin_password + ) + + self.framework.observe(self.on.set_secret_action, self._on_set_secret_action) + + # Get/set/delete values on database relaton + self.framework.observe( + self.on.get_peer_relation_field_action, self._on_get_peer_relation_field + ) + + self.framework.observe(self.on.set_peer_secret_action, self._on_set_peer_secret) + self.framework.observe(self.on.get_peer_secret_action, self._on_get_peer_secret) + self.framework.observe(self.on.delete_peer_secret_action, self._on_delete_peer_secret) + + @property + def peer_relation(self) -> Optional[Relation]: + """The cluster peer relation.""" + return self.model.get_relation(PEER) + + @property + def peer_units_data_interfaces(self) -> dict: + """The cluster peer relation.""" + if not self.peer_relation or not self.peer_relation.units: + return {} + + for unit in self.peer_relation.units: + if unit not in self._servers_data: + self._servers_data[unit] = OpsOtherPeerUnitRepository( + self.model, + relation=self.peer_relation, + component=unit, + ) + return self._servers_data + + def _on_start(self, _) -> None: + """Only sets an Active status.""" + self.unit.status = ActiveStatus() + + def _on_change_admin_password(self, event: ActionEvent): + """Change the admin password.""" + password = self._new_password() + for relation in self.database.interface.relations: + model: DataContractV1 = self.database.interface.build_model( + relation.id, ExtendedDataContractV1 + ) + for request in model.requests: + request.password = password + self.database.interface.write_model(relation.id, model) + + def _on_set_secret_action(self, event: ActionEvent): + """Change the admin password.""" + if not 
(password := event.params["password"]): + password = self._new_password() + for relation in self.database.interface.relations: + model: DataContractV1 = self.database.interface.build_model( + relation.id, ExtendedDataContractV1 + ) + field: str = event.params.get("field") or "value" + if not ExtendedResourceProviderModel._get_secret_field(field): + event.fail("Invalid secret field.") + for request in model.requests: + setattr(request, field, password) + self.database.interface.write_model(relation.id, model) + + def _new_password(self) -> str: + """Generate a random password string. + + Returns: + A random password string. + """ + choices = string.ascii_letters + string.digits + return "".join([secrets.choice(choices) for i in range(16)]) + + def _on_get_peer_secret(self, event: ActionEvent): + """Set requested relation field.""" + component = event.params["component"] + + # Charms should be compatible with old versions, to simulate rolling upgrade + if component == "app": + relation_bis = self.peer_relation_app.relations[0] + repository = self.peer_relation_app.repository(relation_bis.id) + result = repository.get_secret_field(event.params["field"], event.params["group"]) + + else: + relation_bis = self.peer_relation_unit.relations[0] + repository = self.peer_relation_unit.repository(relation_bis.id) + result = repository.get_secret_field(event.params["field"], event.params["group"]) + + event.set_results({event.params["field"]: result if result else ""}) + + def _on_set_peer_secret(self, event: ActionEvent): + """Set requested relation field.""" + component = event.params["component"] + + # Charms should be compatible with old versions, to simulate rolling upgrade + if component == "app": + relation_bis = self.peer_relation_app.relations[0] + repository = self.peer_relation_app.repository(relation_bis.id) + repository.write_secret_field( + event.params["field"], + event.params["value"], + event.params["group"] or SecretGroup("extra"), + ) + else: + relation_bis = 
self.peer_relation_unit.relations[0] + repository = self.peer_relation_unit.repository(relation_bis.id) + repository.write_secret_field( + event.params["field"], event.params["value"], event.params["group"] + ) + + # Remove peer secrets + def _on_delete_peer_secret(self, event: ActionEvent): + """Delete requested relation field.""" + component = event.params["component"] + + # Charms should be compatible with old versions, to simulate rolling upgrade + secret = None + group_str = "" if not event.params["group"] else f".{event.params['group']}" + if component == "app": + secret = self.model.get_secret(label=f"{PEER}.{self.app.name}.app{group_str}") + else: + secret = self.model.get_secret(label=f"{PEER}.{self.app.name}.unit{group_str}") + + if secret: + secret.remove_all_revisions() + + # Get/set/delete field on the peer relation + def _on_get_peer_relation_field(self, event: ActionEvent): + """[second_database]: Set requested relation field.""" + component = event.params["component"] + + value = None + if component == "app": + relation_bis = self.peer_relation_app.relations[0] + repository = self.peer_relation_app.repository(relation_bis.id) + value_new = repository.get_field(event.params["field"]) + else: + relation_bis = self.peer_relation_unit.relations[0] + repository = self.peer_relation_unit.repository(relation_bis.id) + value_new = repository.get_field(event.params["field"]) + event.set_results( + {"value": value if value else "", "value-new": value_new if value_new else ""} + ) + + +if __name__ == "__main__": + main(DatabaseCharm) diff --git a/tests/v1/integration/helpers.py b/tests/v1/integration/helpers.py new file mode 100644 index 00000000..e2434877 --- /dev/null +++ b/tests/v1/integration/helpers.py @@ -0,0 +1,314 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+import json +from time import sleep +from typing import Dict, List, Optional + +import yaml +from pytest_operator.plugin import OpsTest + +PROV_SECRET_PREFIX = "secret-" + + +async def get_juju_secret(ops_test: OpsTest, secret_uri: str) -> Dict[str, str]: + """Retrieve juju secret.""" + secret_unique_id = secret_uri.split("/")[-1] + complete_command = f"show-secret {secret_uri} --reveal --format=json" + _, stdout, _ = await ops_test.juju(*complete_command.split()) + return json.loads(stdout)[secret_unique_id]["content"]["Data"] + + +async def list_juju_secrets(ops_test: OpsTest) -> List[str]: + """Check if a juju secret does not exist.""" + _, stdout, _ = await ops_test.juju("list-secrets") + data = stdout.split("\n") + data = data[1:] + return [line.split()[0] for line in data if line] + + +async def get_leader_id(ops_test: OpsTest, app_name: str) -> int: + """Returns the unit number of the juju leader unit.""" + for unit in ops_test.model.applications[app_name].units: + if await unit.is_leader_from_status(): + return int(unit.name.split("/")[1]) + return -1 + + +async def get_non_leader_id(ops_test: OpsTest, app_name: str) -> int: + """Returns the unit number of the juju leader unit.""" + for unit in ops_test.model.applications[app_name].units: + if not await unit.is_leader_from_status(): + return int(unit.name.split("/")[1]) + return -1 + + +async def build_connection_string( + ops_test: OpsTest, + application_name: str, + relation_name: str, + *, + relation_id: str | None = None, + relation_alias: str | None = None, +) -> str: + """Build a PostgreSQL connection string. 
+ + Args: + ops_test: The ops test framework instance + application_name: The name of the application + relation_name: name of the relation to get connection data from + relation_id: id of the relation to get connection data from + relation_alias: alias of the relation (like a connection name) + to get connection data from + + Returns: + a PostgreSQL connection string + """ + # Get the connection data exposed to the application through the relation. + database = f"{application_name.replace('-', '_')}_{relation_name.replace('-', '_')}" + + requests = json.loads( + await get_application_relation_data( + ops_test, application_name, relation_name, "requests", relation_id, relation_alias + ) + or "[]" + ) + + # We only have one request. + request = requests[0] + + secret_uri = request[f"{PROV_SECRET_PREFIX}user"] + secret_data = await get_juju_secret(ops_test, secret_uri) + username = secret_data["username"] + password = secret_data["password"] + + endpoints = request["endpoints"] + host = endpoints.split(",")[0].split(":")[0] + + # Build the complete connection string to connect to the database. + return f"dbname='{database}' user='{username}' host='{host}' password='{password}' connect_timeout=10" + + +async def get_connection_info( + ops_test: OpsTest, + application_name: str, + relation_name: str, + relation_id: str = None, +) -> str: + """Build a dictionary that contains the connection info. + + Args: + ops_test: The ops test framework instance + application_name: The name of the application + relation_name: name of the relation to get connection data from + relation_id: id of the relation to get connection data from + + Returns: + a dictionary that contains connection info fields + """ + # Get the connection data exposed to the application through the relation. 
+ + access_key = await get_application_relation_data( + ops_test, application_name, relation_name, "access-key", relation_id + ) + secret_key = await get_application_relation_data( + ops_test, application_name, relation_name, "secret-key", relation_id + ) + endpoint = await get_application_relation_data( + ops_test, application_name, relation_name, "endpoint", relation_id + ) + bucket = await get_application_relation_data( + ops_test, application_name, relation_name, "bucket", relation_id + ) + path = await get_application_relation_data( + ops_test, application_name, relation_name, "path", relation_id + ) + region = await get_application_relation_data( + ops_test, application_name, relation_name, "region", relation_id + ) + s3_uri_style = await get_application_relation_data( + ops_test, application_name, relation_name, "s3-uri-style", relation_id + ) + storage_class = await get_application_relation_data( + ops_test, application_name, relation_name, "storage-class", relation_id + ) + tls_ca_chain = await get_application_relation_data( + ops_test, application_name, relation_name, "tls-ca-chain", relation_id + ) + s3_api_version = await get_application_relation_data( + ops_test, application_name, relation_name, "s3-api-version", relation_id + ) + attributes = await get_application_relation_data( + ops_test, application_name, relation_name, "attributes", relation_id + ) + + connection_info = { + "access-key": access_key, + "secret-key": secret_key, + "endpoint": endpoint, + "bucket": bucket, + "path": path, + "region": region, + "s3-uri-style": s3_uri_style, + "storage-class": storage_class, + "tls-ca-chain": tls_ca_chain, + "s3-api-version": s3_api_version, + "attributes": attributes, + } + # Return the connection info extracted from the relation. 
+ return connection_info + + +async def get_alias_from_relation_data( + ops_test: OpsTest, unit_name: str, related_unit_name: str +) -> Optional[str]: + """Get the alias that the unit assigned to the related unit application/cluster. + + Args: + ops_test: The ops test framework instance + unit_name: The name of the unit + related_unit_name: name of the related unit + + Returns: + the alias for the application/cluster of + the related unit + + Raises: + ValueError if it's not possible to get unit data + or if there is no alias on that. + """ + raw_data = (await ops_test.juju("show-unit", related_unit_name))[1] + if not raw_data: + raise ValueError(f"no unit info could be grabbed for {related_unit_name}") + data = yaml.safe_load(raw_data) + + # Retrieve the relation data from the unit. + relation_data = {} + for relation in data[related_unit_name]["relation-info"]: + # Peer relation typically + if "related-units" not in relation: + continue + for name, unit in relation["related-units"].items(): + if name == unit_name: + relation_data = unit["data"] + break + + # Check whether the unit has set an alias for the related unit application/cluster. + if "alias" not in relation_data: + raise ValueError(f"no alias could be grabbed for {related_unit_name} application/cluster") + + return relation_data["alias"] + + +async def get_application_relation_data( + ops_test: OpsTest, + application_name: str, + relation_name: str, + key: str, + relation_id: str | None = None, + relation_alias: str | None = None, + related_endpoint: str | None = None, + app_or_unit: str = "app", +) -> Optional[str]: + """Get relation data for an application. 
+ + Args: + ops_test: The ops test framework instance + application_name: The name of the application + relation_name: name of the relation to get connection data from + key: key of data to be retrieved + relation_id: id of the relation to get connection data from + relation_alias: alias of the relation (like a connection name) + to get connection data from + related_endpoint: the related endpoint, i.e. the "other side" of the relation + app_or_unit: whether it's the app or the unit databag to be searched + + Returns: + the data that was requested or None + if no data in the relation + + Raises: + ValueError if it's not possible to get application data + or if there is no data for the particular relation endpoint + and/or alias. + """ + unit_name = f"{application_name}/0" + raw_data = (await ops_test.juju("show-unit", unit_name))[1] + if not raw_data: + raise ValueError(f"no unit info could be grabbed for {unit_name}") + data = yaml.safe_load(raw_data) + # Filter the data based on the relation name. + relation_data = [v for v in data[unit_name]["relation-info"] if v["endpoint"] == relation_name] + + if relation_id: + # Filter the data based on the relation id. + relation_data = [v for v in relation_data if v["relation-id"] == relation_id] + if relation_alias: + # Filter the data based on the cluster/relation alias. 
+ relation_data = [ + v + for v in relation_data + if await get_alias_from_relation_data( + ops_test, unit_name, next(iter(v["related-units"])) + ) + == relation_alias + ] + if related_endpoint: + relation_data = [ + v + for v in data[unit_name]["relation-info"] + if v["related-endpoint"] == related_endpoint + ] + if len(relation_data) == 0: + raise ValueError( + f"no relation data could be grabbed on relation with endpoint {relation_name} and alias {relation_alias}" + ) + if app_or_unit == "app": + return relation_data[0]["application-data"].get(key) + else: + return relation_data[0]["local-unit"].get("data", {}).get(key) + + +async def check_logs(ops_test: OpsTest, strings: str, limit: int = 10) -> bool: + """Check if any of strings may appear in juju debug-log.""" + # juju debug-log may not be flushed yet, thus the "tenacity simulation" + for tries in range(5): + sleep(3) + _, dbg_log, _ = await ops_test.juju("debug-log", "--no-tail", "--replay") + if any(text in dbg_log for text in strings): + return True + return False + + +async def get_secret_by_label(ops_test, label: str, owner: str = "") -> Dict[str, str]: + secrets_raw = await ops_test.juju("list-secrets") + secret_ids = [ + secret_line.split()[0] for secret_line in secrets_raw[1].split("\n")[1:] if secret_line + ] + + for secret_id in secret_ids: + secret_data_raw = await ops_test.juju( + "show-secret", "--format", "json", "--reveal", secret_id + ) + secret_data = json.loads(secret_data_raw[1]) + + if label == secret_data[secret_id].get("label"): + if not owner or owner == secret_data[secret_id].get("owner"): + return secret_data[secret_id]["content"]["Data"] + + +async def get_secret_revision_by_label(ops_test, label: str, owner: str = "") -> int: + secrets_raw = await ops_test.juju("list-secrets") + secret_ids = [ + secret_line.split()[0] for secret_line in secrets_raw[1].split("\n")[1:] if secret_line + ] + + for secret_id in secret_ids: + secret_data_raw = await ops_test.juju( + "show-secret", 
"--format", "json", "--reveal", secret_id + ) + secret_data = json.loads(secret_data_raw[1]) + + if label == secret_data[secret_id].get("label"): + if not owner or owner == secret_data[secret_id].get("owner"): + return int(secret_data[secret_id]["revision"]) diff --git a/tests/v1/integration/kafka-charm/actions.yaml b/tests/v1/integration/kafka-charm/actions.yaml new file mode 100644 index 00000000..8a8d6bca --- /dev/null +++ b/tests/v1/integration/kafka-charm/actions.yaml @@ -0,0 +1,25 @@ +# Copyright 2022 Canonical Ltd. +sync-password: + description: Set password. + params: + password: + type: string + description: Kafka password. + required: [password] +sync-username: + description: Set usernmae. + params: + username: + type: string + description: Kafka username. + required: [username] + +sync-bootstrap-server: + description: Set bootstrap-server. + params: + bootstrap-server: + type: string + description: Kafka bootstrap-server. + required: [bootstrap-server] + + diff --git a/tests/v1/integration/kafka-charm/charmcraft.yaml b/tests/v1/integration/kafka-charm/charmcraft.yaml new file mode 100644 index 00000000..cf133daf --- /dev/null +++ b/tests/v1/integration/kafka-charm/charmcraft.yaml @@ -0,0 +1,84 @@ +# Copyright 2021 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +type: charm +# Whenever "bases" is changed: +# - Update tests/integration/conftest.py::pytest_configure() +# - Update .github/workflow/ci.yaml integration-test matrix +platforms: + ubuntu@22.04:amd64: + ubuntu@24.04:amd64: + +# Files implicitly created by charmcraft without a part: +# - dispatch (https://github.com/canonical/charmcraft/pull/1898) +# - manifest.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259) +# Files implicitly copied/"staged" by charmcraft without a part: +# - actions.yaml, config.yaml, metadata.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293 +# https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157) +parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + # Use environment variable instead of `--break-system-packages` to avoid failing on older + # versions of pip that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + # poetry 2.0.0 requires Python >=3.9 + if ! 
"$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" diff --git a/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/.gitkeep b/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/kafka-charm/metadata.yaml b/tests/v1/integration/kafka-charm/metadata.yaml new file mode 100644 index 00000000..55f4e120 --- /dev/null +++ b/tests/v1/integration/kafka-charm/metadata.yaml @@ -0,0 +1,16 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+name: kafka +description: | + Toy charm used to emulate Kafka in integration tests. +summary: | + Charm used to mimic the Kafka charm for test purpose only. + + +peers: + kafka-peers: + interface: kafka_peers + +provides: + kafka-client: + interface: kafka_client diff --git a/tests/v1/integration/kafka-charm/pyproject.toml b/tests/v1/integration/kafka-charm/pyproject.toml new file mode 100644 index 00000000..8ff16202 --- /dev/null +++ b/tests/v1/integration/kafka-charm/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +package-mode = false +requires-poetry = ">=2.0.0" + +[tool.poetry.dependencies] +python = "^3.10.12" +ops = ">=2.0.0,<3.0.0" +pydantic = ">=2.11" + +[tool.poetry.group.charm-libs.dependencies] +ops = ">=2.0.0" +pydantic = ">=2.11" + +[tool.poetry.requires-plugins] +poetry-plugin-export = ">=1.8" + +[build-system] +build-backend = "poetry.core.masonry.api" diff --git a/tests/v1/integration/kafka-charm/src/charm.py b/tests/v1/integration/kafka-charm/src/charm.py new file mode 100755 index 00000000..77fadf95 --- /dev/null +++ b/tests/v1/integration/kafka-charm/src/charm.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Kafka provider charm that accepts connections from application charms. + +This charm is meant to be used only for testing +of the libraries in this repository. 
+""" + +import logging +import os + +from ops.charm import ActionEvent, CharmBase +from ops.main import main +from ops.model import ActiveStatus, MaintenanceStatus +from pydantic import SecretStr + +from charms.data_platform_libs.v1.data_interfaces import ( + DataContractV1, + KafkaRequestModel, + KafkaResponseModel, + MtlsCertUpdatedEvent, + ResourceEntityRequestedEvent, + ResourceProviderEventHandler, + ResourceRequestedEvent, + SecretBool, +) + +logger = logging.getLogger(__name__) + +PEER = "kafka-peers" +REL = "kafka-client" + + +class KafkaCharm(CharmBase): + """Kafka charm that accepts connections from application charms.""" + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the Kafka Provides charm library. + self.kafka_provider = ResourceProviderEventHandler( + self, relation_name=REL, request_model=KafkaRequestModel, mtls_enabled=True + ) + self.framework.observe( + self.kafka_provider.on.resource_requested, + self._on_topic_requested, + ) + self.framework.observe( + self.kafka_provider.on.resource_entity_requested, + self._on_topic_entity_requested, + ) + + self.framework.observe(self.on[PEER].relation_joined, self._on_peer_relation_joined) + + # actions + self.framework.observe(self.on.sync_password_action, self._on_sync_password) + self.framework.observe(self.on.sync_username_action, self._on_sync_username) + self.framework.observe( + self.on.sync_bootstrap_server_action, self._on_sync_bootstrap_server + ) + self.framework.observe( + self.kafka_provider.on.mtls_cert_updated, self._on_mtls_cert_updated + ) + + def _on_peer_relation_joined(self, _): + pass + + @property + def app_peer_data(self) -> dict: + """Application peer relation data object.""" + relation = self.model.get_relation(PEER) + if not relation: + return {} + + return dict(relation.data[self.app]) + + def get_secret(self, scope: str, key: str) -> str | None: + """Get 
secret from the secret storage.""" + if scope == "app": + return self.app_peer_data.get(key, None) + else: + raise RuntimeError("Unknown secret scope.") + + def set_secret(self, scope: str, key: str, value: str | None) -> None: + """Set secret in the secret storage.""" + if scope == "app": + if not value: + del self.app_peer_data[key] + return + self.app_peer_data.update({key: value}) + else: + raise RuntimeError("Unknown secret scope.") + + def _on_start(self, _) -> None: + """Only sets an active status.""" + self.unit.status = ActiveStatus("Kafka Ready!") + + def _on_topic_requested(self, event: ResourceRequestedEvent[KafkaRequestModel]): + """Handle the on_topic_requested event.""" + self.unit.status = MaintenanceStatus("Creating connection") + # retrieve topic name from the requirer side + topic = event.request.resource + consumer_group_prefix = event.request.consumer_group_prefix + + if consumer_group_prefix is not None: + consumer_group_prefix = SecretStr(consumer_group_prefix) + + relation_id = event.relation.id + + username = "admin" + password = "password" + bootstrap_server = "host1:port,host2:port" + self.set_secret("app", "username", username) + self.set_secret("app", "password", password) + self.set_secret("app", "bootstrap-server", bootstrap_server) + # set connection info in the databag relation + response = KafkaResponseModel( + salt=event.request.salt, + request_id=event.request.request_id, + username=SecretStr(username), + password=SecretStr(password), + endpoints=bootstrap_server, + consumer_group_prefix=consumer_group_prefix, + tls=SecretBool(True), + tls_ca=SecretStr("Canonical"), + zookeeper_uris=SecretStr("protocol.z1:port/,protocol.z2:port/"), + resource=topic, + ) + self.kafka_provider.set_response(relation_id, response) + self.unit.status = ActiveStatus(f"Topic: {topic} granted!") + + def _on_topic_entity_requested(self, event: ResourceEntityRequestedEvent): + """Handle the on_topic_entity_requested event.""" + self.unit.status = 
MaintenanceStatus("Creating entity") + + rolename = "admin" + password = "password" + self.set_secret("app", "entity-name", rolename) + self.set_secret("app", "entity-password", password) + response = KafkaResponseModel( + request_id=event.request.request_id, + salt=event.request.salt, + entity_name=SecretStr(rolename), + entity_password=SecretStr(password), + ) + # set connection info in the databag relation + self.kafka_provider.set_response(event.relation.id, response) + self.unit.status = ActiveStatus(f"Entity: {rolename} created!") + + def _on_sync_password(self, event: ActionEvent): + """Set the password in the data relation databag.""" + logger.info("On sync password") + + password: str | None = event.params.get("password") + if not password: + event.fail("Invalid password") + return + self.set_secret("app", "password", password) + logger.info(f"New password: {password}") + # set parameters in the secrets + # update relation data if the relation is present + if len(self.kafka_provider.relations) > 0: + for relation in self.kafka_provider.relations: + model = self.kafka_provider.interface.build_model( + relation.id, DataContractV1[KafkaResponseModel] + ) + for request in model.requests: + request.password = SecretStr(password) + self.kafka_provider.interface.write_model(relation.id, model) + event.set_results({"password": self.get_secret("app", "password")}) + + def _on_sync_username(self, event: ActionEvent): + """Set the username in the data relation databag.""" + username = event.params["username"] + + if not username: + event.fail("Invalid username") + return + + self.set_secret("app", "username", username) + + # set parameters in the secrets + # update relation data if the relation is present + if len(self.kafka_provider.relations) > 0: + for relation in self.kafka_provider.relations: + model = self.kafka_provider.interface.build_model( + relation.id, DataContractV1[KafkaResponseModel] + ) + for request in model.requests: + request.username = username + 
self.kafka_provider.interface.write_model(relation.id, model) + event.set_results({"username": self.get_secret("app", "username")}) + + def _on_sync_bootstrap_server(self, event: ActionEvent): + """Set the bootstrap server in the data relation databag.""" + bootstrap_server = event.params["bootstrap-server"] + self.set_secret("app", "bootstrap-server", bootstrap_server) + # set parameters in the secrets + # update relation data if the relation is present + if len(self.kafka_provider.relations) > 0: + for relation in self.kafka_provider.relations: + model = self.kafka_provider.interface.build_model( + relation.id, DataContractV1[KafkaResponseModel] + ) + for request in model.requests: + request.endpoints = self.get_secret("app", "bootstrap-server") + self.kafka_provider.interface.write_model(relation.id, model) + event.set_results({"bootstrap-server": self.get_secret("app", "bootstrap-server")}) + + def _on_reset_unit_status(self, event: ActionEvent): + """Reset the status message of the unit.""" + self.unit.status = ActiveStatus() + event.set_results({"Status": "Reset unit status message"}) + + def _on_mtls_cert_updated(self, event: MtlsCertUpdatedEvent): + mtls_cert = event.request.mtls_cert + if not mtls_cert: + return + + open("client-cert.pem", "w").write(mtls_cert.get_secret_value()) + self.unit.status = ActiveStatus(f"{os.getcwd()}/client-cert.pem") + + +if __name__ == "__main__": + main(KafkaCharm) diff --git a/tests/v1/integration/kafka-connect-charm/actions.yaml b/tests/v1/integration/kafka-connect-charm/actions.yaml new file mode 100644 index 00000000..79bafa15 --- /dev/null +++ b/tests/v1/integration/kafka-connect-charm/actions.yaml @@ -0,0 +1,13 @@ +# Copyright 2025 Canonical Ltd. +sync: + description: Change Kafka Connect provider username/password/endpoints. + params: + key: + type: string + description: | + one of the following values: username | password | endpoints + value: + type: string + description: New username/password/endpoints. 
+ required: [key, value] + diff --git a/tests/v1/integration/kafka-connect-charm/charmcraft.yaml b/tests/v1/integration/kafka-connect-charm/charmcraft.yaml new file mode 100644 index 00000000..b4317391 --- /dev/null +++ b/tests/v1/integration/kafka-connect-charm/charmcraft.yaml @@ -0,0 +1,84 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +type: charm +# Whenever "bases" is changed: +# - Update tests/integration/conftest.py::pytest_configure() +# - Update .github/workflow/ci.yaml integration-test matrix +platforms: + ubuntu@22.04:amd64: + ubuntu@24.04:amd64: + +# Files implicitly created by charmcraft without a part: +# - dispatch (https://github.com/canonical/charmcraft/pull/1898) +# - manifest.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259) +# Files implicitly copied/"staged" by charmcraft without a part: +# - actions.yaml, config.yaml, metadata.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293 +# https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157) +parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + # Use environment variable instead of `--break-system-packages` to avoid failing on older + # versions of pip that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + # poetry 2.0.0 requires Python >=3.9 + if ! 
"$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" diff --git a/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/.gitkeep b/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/kafka-connect-charm/metadata.yaml b/tests/v1/integration/kafka-connect-charm/metadata.yaml new file mode 100644 index 00000000..e93e1229 --- /dev/null +++ b/tests/v1/integration/kafka-connect-charm/metadata.yaml @@ -0,0 +1,16 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. 
Charm used to mimic the Kafka Connect charm for testing purposes only.
+""" + +import logging + +from ops.charm import ActionEvent, CharmBase +from ops.main import main +from ops.model import ActiveStatus, MaintenanceStatus +from pydantic import SecretStr + +from charms.data_platform_libs.v1.data_interfaces import ( + DataContractV1, + RequirerCommonModel, + ResourceProviderEventHandler, + ResourceProviderModel, + ResourceRequestedEvent, + SecretBool, +) + +logger = logging.getLogger(__name__) + +PEER = "worker" +REL = "connect-client" +BAD_URL = "http://badurl" +SYNC_ACTIONS = ("username", "password", "endpoints") + + +class KafkaConnectCharm(CharmBase): + """Kafka connect charm that accepts connections from application charms.""" + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the Kafka Connect Provides charm library. + self.provider = ResourceProviderEventHandler(self, REL, RequirerCommonModel) + self.framework.observe(self.provider.on.resource_requested, self._on_integration_requested) + self.framework.observe(self.on[PEER].relation_joined, self._on_peer_relation_joined) + + # syncaction + self.framework.observe(self.on.sync_action, self._on_sync) + + def _on_peer_relation_joined(self, _): + pass + + @property + def app_peer_data(self) -> dict: + """Application peer relation data object.""" + relation = self.model.get_relation(PEER) + if not relation: + return {} + + return dict(relation.data[self.app]) + + def get_secret(self, scope: str, key: str) -> str: + """Get secret from the secret storage.""" + if scope == "app": + return self.app_peer_data.get(key, "") + else: + raise RuntimeError("Unknown secret scope.") + + def set_secret(self, scope: str, key: str, value: str | None) -> None: + """Set secret in the secret storage.""" + if scope == "app": + if not value: + del self.app_peer_data[key] + return + self.app_peer_data.update({key: value}) + else: + raise RuntimeError("Unknown secret scope.") + + def 
_on_start(self, _) -> None: + """Only sets an active status.""" + self.unit.status = ActiveStatus("Kafka Connect Ready!") + + def _download_plugin(self, plugin_url) -> bool: + """Fake plugin downloader, returns True on every URL except `BAD_URL`.""" + if plugin_url == BAD_URL: + return False + + return True + + def _on_integration_requested(self, event: ResourceRequestedEvent): + """Handle the `on_integration_requested` event.""" + # retrieve `plugin-url` from the requirer side + plugin_url = event.request.resource + self.unit.status = MaintenanceStatus(f"Retrieving plugin from client: {plugin_url}.") + + if not self._download_plugin(plugin_url): + event.defer() + return + + self.unit.status = MaintenanceStatus("Plugin downloaded successfully.") + relation_id = event.relation.id + + username = "integrator" + password = "password" + endpoints = "http://worker1:8083,http://worker2:8083" + self.set_secret("app", "username", username) + self.set_secret("app", "password", password) + self.set_secret("app", "endpoints", endpoints) + # set connection info in the databag relation + response = ResourceProviderModel( + salt=event.request.salt, + request_id=event.request.request_id, + endpoints=endpoints, + username=SecretStr(username), + password=SecretStr(password), + tls=SecretBool(False), + tls_ca=SecretStr("disabled"), + ) + self.provider.set_response(relation_id, response) + self.unit.status = ActiveStatus( + f"Integration setup successful for relation {relation_id}!" 
+ ) + + def _on_sync(self, event: ActionEvent): + """Handler for `sync` action.""" + key = event.params.get("key") + if key not in SYNC_ACTIONS: + event.fail(f"Action '{key}' not permitted.") + return + + value = event.params.get("value", "") + self.set_secret("app", key, value) + + # update clients data + if len(self.provider.relations) > 0: + self._update_clients_data(key, value) + + event.set_results({key: value}) + self.unit.status = ActiveStatus(f"{key} changed on connect_client!") + + def _update_clients_data(self, key: str, value: str) -> None: + """Updates connect clients data.""" + if key not in ("username", "password", "endpoints"): + return + for relation in self.provider.relations: + model = self.provider.interface.build_model( + relation.id, DataContractV1[ResourceProviderModel] + ) + for request in model.requests: + if key in ("username", "password"): + setattr(request, key, SecretStr(self.get_secret("app", key))) + else: + setattr(request, key, value) + self.provider.interface.write_model(relation.id, model) + + def _on_reset_unit_status(self, event: ActionEvent): + """Reset the status message of the unit.""" + self.unit.status = ActiveStatus() + event.set_results({"Status": "Reset unit status message"}) + + +if __name__ == "__main__": + main(KafkaConnectCharm) diff --git a/tests/v1/integration/opensearch-charm/actions.yaml b/tests/v1/integration/opensearch-charm/actions.yaml new file mode 100644 index 00000000..fbf178c3 --- /dev/null +++ b/tests/v1/integration/opensearch-charm/actions.yaml @@ -0,0 +1,4 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
+change-admin-password: + description: Change the admin password to a new, generated value diff --git a/tests/v1/integration/opensearch-charm/charmcraft.yaml b/tests/v1/integration/opensearch-charm/charmcraft.yaml new file mode 100644 index 00000000..b4317391 --- /dev/null +++ b/tests/v1/integration/opensearch-charm/charmcraft.yaml @@ -0,0 +1,84 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. + +type: charm +# Whenever "bases" is changed: +# - Update tests/integration/conftest.py::pytest_configure() +# - Update .github/workflow/ci.yaml integration-test matrix +platforms: + ubuntu@22.04:amd64: + ubuntu@24.04:amd64: + +# Files implicitly created by charmcraft without a part: +# - dispatch (https://github.com/canonical/charmcraft/pull/1898) +# - manifest.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L259) +# Files implicitly copied/"staged" by charmcraft without a part: +# - actions.yaml, config.yaml, metadata.yaml +# (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L290-L293 +# https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/services/package.py#L156-L157) +parts: + poetry-deps: + plugin: nil + build-packages: + - curl + override-build: | + # Use environment variable instead of `--break-system-packages` to avoid failing on older + # versions of pip that do not recognize `--break-system-packages` + # `--user` needed (in addition to `--break-system-packages`) for Ubuntu >=24.04 + PIP_BREAK_SYSTEM_PACKAGES=true python3 -m pip install --user --upgrade pip==24.3.1 # renovate: charmcraft-pip-latest + + # Use uv to install poetry so that a newer version of Python can be installed if needed by poetry + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/uv/releases/download/0.5.15/uv-installer.sh | sh # renovate: charmcraft-uv-latest + 
# poetry 2.0.0 requires Python >=3.9 + if ! "$HOME/.local/bin/uv" python find '>=3.9' + then + # Use first Python version that is >=3.9 and available in an Ubuntu LTS + # (to reduce the number of Python versions we use) + "$HOME/.local/bin/uv" python install 3.10.12 # renovate: charmcraft-python-ubuntu-22.04 + fi + "$HOME/.local/bin/uv" tool install --no-python-downloads --python '>=3.9' poetry==2.0.0 --with poetry-plugin-export==1.8.0 # renovate: charmcraft-poetry-latest + + ln -sf "$HOME/.local/bin/poetry" /usr/local/bin/poetry + # "charm-poetry" part name is arbitrary; use for consistency + # Avoid using "charm" part name since that has special meaning to charmcraft + charm-poetry: + # By default, the `poetry` plugin creates/stages these directories: + # - lib, src + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L76-L78) + # - venv + # (https://github.com/canonical/charmcraft/blob/9ff19c328e23b50cc06f04e8a5ad4835740badf4/charmcraft/parts/plugins/_poetry.py#L95 + # https://github.com/canonical/craft-parts/blob/afb0d652eb330b6aaad4f40fbd6e5357d358de47/craft_parts/plugins/base.py#L270) + plugin: poetry + source: . 
+ after: + - poetry-deps + poetry-export-extra-args: ['--only', 'main,charm-libs'] + build-packages: + - libffi-dev # Needed to build Python dependencies with Rust from source + - libssl-dev # Needed to build Python dependencies with Rust from source + - pkg-config # Needed to build Python dependencies with Rust from source + override-build: | + # Workaround for https://github.com/canonical/charmcraft/issues/2068 + # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source + if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] + then + snap install rustup --classic + else + apt-get install rustup -y + fi + + # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu + # archive—which means the rustup version could be updated at any time. Print rustup version + # to build log to make changes to the snap's rustup version easier to track + rustup --version + + # rpds-py (Python package) >=0.19.0 requires rustc >=1.76, which is not available in the + # Ubuntu 22.04 archive. Install rustc and cargo using rustup instead of the Ubuntu archive + rustup set profile minimal + rustup default 1.83.0 # renovate: charmcraft-rust-latest + + + craftctl default + # Include requirements.txt in *.charm artifact for easier debugging + cp requirements.txt "$CRAFT_PART_INSTALL/requirements.txt" diff --git a/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/.gitkeep b/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/opensearch-charm/metadata.yaml b/tests/v1/integration/opensearch-charm/metadata.yaml new file mode 100644 index 00000000..2afaa0cf --- /dev/null +++ b/tests/v1/integration/opensearch-charm/metadata.yaml @@ -0,0 +1,16 @@ +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
Charm used to mimic the opensearch charm for testing purposes only.
+""" + +import logging +import secrets +import string + +from ops.charm import ActionEvent, CharmBase +from ops.main import main +from ops.model import ActiveStatus, MaintenanceStatus +from pydantic import SecretStr + +from charms.data_platform_libs.v1.data_interfaces import ( + DataContractV1, + RequirerCommonModel, + ResourceEntityRequestedEvent, + ResourceProviderEventHandler, + ResourceProviderModel, + ResourceRequestedEvent, + SecretBool, +) + +logger = logging.getLogger(__name__) + +PEER = "opensearch-peers" +REL = "opensearch-client" + + +class OpenSearchCharm(CharmBase): + """OpenSearch charm that accepts connections from application charms.""" + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the OpenSearchProvides charm library. + self.opensearch_provider = ResourceProviderEventHandler(self, REL, RequirerCommonModel) + self.framework.observe( + self.opensearch_provider.on.resource_requested, + self._on_index_requested, + ) + self.framework.observe( + self.opensearch_provider.on.resource_entity_requested, + self._on_index_entity_requested, + ) + self.framework.observe(self.on[PEER].relation_joined, self._on_peer_relation_joined) + self.framework.observe( + self.on.change_admin_password_action, self._on_change_admin_password + ) + + def _on_peer_relation_joined(self, _): + pass + + @property + def app_peer_data(self) -> dict: + """Application peer relation data object.""" + relation = self.model.get_relation(PEER) + if not relation: + return {} + + return dict(relation.data[self.app]) + + def get_secret(self, scope: str, key: str) -> str | None: + """Get secret from the secret storage.""" + if scope == "app": + return self.app_peer_data.get(key, None) + else: + raise RuntimeError("Unknown secret scope.") + + def set_secret(self, scope: str, key: str, value: str | None) -> None: + """Set secret in the secret storage.""" + if scope == 
"app": + if not value: + del self.app_peer_data[key] + return + self.app_peer_data.update({key: value}) + else: + raise RuntimeError("Unknown secret scope.") + + def _on_start(self, _) -> None: + """Only sets an active status.""" + self.unit.status = ActiveStatus("OpenSearch Ready!") + + def _on_change_admin_password(self, event: ActionEvent): + """Change the admin password.""" + password = self._new_password() + for relation in self.opensearch_provider.relations: + model = self.opensearch_provider.interface.build_model( + relation.id, DataContractV1[ResourceProviderModel] + ) + for request in model.requests: + request.password = SecretStr(password) + self.opensearch_provider.interface.write_model(relation.id, model) + + def _on_index_requested(self, event: ResourceRequestedEvent[RequirerCommonModel]): + """Handle the on_index_requested event.""" + self.unit.status = MaintenanceStatus("Creating connection") + # retrieve index name from the requirer side + index = event.request.resource + + relation_id = event.relation.id + + username = "admin" + password = "password" + endpoints = "host1:port,host2:port" + self.set_secret("app", "username", username) + self.set_secret("app", "password", password) + self.set_secret("app", "endpoints", endpoints) + # set connection info in the databag relation + response = ResourceProviderModel( + salt=event.request.salt, + request_id=event.request.request_id, + resource=index, + username=SecretStr(username), + password=SecretStr(password), + tls_ca=SecretStr("Canonical"), + endpoints=endpoints, + tls=SecretBool(True), + ) + self.opensearch_provider.set_response(relation_id, response) + self.unit.status = ActiveStatus(f"index: {index} granted!") + + def _on_index_entity_requested(self, event: ResourceEntityRequestedEvent): + """Handle the on_index_entity_requested event.""" + self.unit.status = MaintenanceStatus("Creating entity") + + rolename = "admin" + password = "password" + self.set_secret("app", "entity-name", rolename) + 
self.set_secret("app", "entity-password", password) + response = ResourceProviderModel( + salt=event.request.salt, + request_id=event.request.request_id, + entity_name=SecretStr(rolename), + entity_password=SecretStr(password), + ) + # set connection info in the databag relation + self.opensearch_provider.set_response(event.relation.id, response) + self.unit.status = ActiveStatus(f"entity: {rolename} created!") + + def _on_reset_unit_status(self, event: ActionEvent): + """Reset the status message of the unit.""" + self.unit.status = ActiveStatus() + event.set_results({"Status": "Reset unit status message"}) + + def _new_password(self) -> str: + """Generate a random password string. + + Returns: + A random password string. + """ + choices = string.ascii_letters + string.digits + return "".join([secrets.choice(choices) for i in range(16)]) + + +if __name__ == "__main__": + main(OpenSearchCharm) diff --git a/tests/v1/integration/test_backward_compatibility_charm.py b/tests/v1/integration/test_backward_compatibility_charm.py new file mode 100644 index 00000000..682102e1 --- /dev/null +++ b/tests/v1/integration/test_backward_compatibility_charm.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
@pytest.mark.abort_on_fail
@pytest.mark.log_errors_allowed
@pytest.mark.skip_if_deployed
async def test_deploy_charms(ops_test: OpsTest, backward_compatibility_charm, database_charm):
    """Deploy the backward-compatibility app and the database charm for the tests.

    A single unit per application suffices: only the leader writes relation data.
    """
    database_resources = {
        "database-image": DATABASE_APP_METADATA["resources"]["database-image"]["upstream-source"]
    }
    await asyncio.gather(
        ops_test.model.deploy(
            backward_compatibility_charm,
            application_name=APPLICATION_APP_NAME,
            num_units=1,
            series="jammy",
        ),
        ops_test.model.deploy(
            database_charm,
            application_name=DATABASE_APP_NAME,
            resources=database_resources,
            num_units=1,
            series="jammy",
        ),
    )
    await ops_test.model.wait_for_idle(
        apps=[DATABASE_APP_NAME], status="active", wait_for_exact_units=1
    )
    await ops_test.model.wait_for_idle(
        apps=[APPLICATION_APP_NAME], status="active", wait_for_exact_units=1
    )


@pytest.mark.abort_on_fail
async def test_backward_relation_with_charm_libraries_secrets(ops_test: OpsTest):
    """Relate the charms and verify the v0-compatible data exchange."""
    relation = await ops_test.model.add_relation(
        DATABASE_APP_NAME, f"{APPLICATION_APP_NAME}:{RELATION_NAME}"
    )
    await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active")

    # Each requirer unit reports this status message once the created event fired.
    for unit in ops_test.model.applications[APPLICATION_APP_NAME].units:
        assert unit.workload_status_message == "backward_database_created"

    # Pull the serialized requests out of the relation databag and resolve the
    # user secret referenced by the first request.
    raw_requests = await get_application_relation_data(
        ops_test, APPLICATION_APP_NAME, RELATION_NAME, "requests"
    )
    request = json.loads(raw_requests or "[]")[0]
    secret_data = await get_juju_secret(ops_test, request[f"{PROV_SECRET_PREFIX}user"])

    assert secret_data["username"] == f"relation_{relation.id}_None"
    assert len(secret_data["password"]) == 16
    assert request["endpoints"]
    assert request["resource"] == "bwclient"
+import asyncio +import json +import logging +from pathlib import Path +from time import sleep + +import psycopg2 +import pytest +import yaml +from pytest_operator.plugin import OpsTest + +from .helpers import ( + build_connection_string, + check_logs, + get_application_relation_data, + get_juju_secret, + get_leader_id, + get_non_leader_id, + get_secret_by_label, + get_secret_revision_by_label, + list_juju_secrets, +) + +logger = logging.getLogger(__name__) + +APPLICATION_APP_NAME = "application" +DATABASE_APP_NAME = "database" +DATABASE_DUMMY_APP_NAME = "dummy-database" +ANOTHER_DATABASE_APP_NAME = "another-database" +APP_NAMES = [APPLICATION_APP_NAME, DATABASE_APP_NAME, ANOTHER_DATABASE_APP_NAME] +DATABASE_APP_METADATA = yaml.safe_load( + Path("./tests/v1/integration/database-charm/metadata.yaml").read_text() +) +DATABASE_DUMMY_APP_METADATA = yaml.safe_load( + Path("./tests/v1/integration/dummy-database-charm/metadata.yaml").read_text() +) + +DB_FIRST_DATABASE_RELATION_NAME = "first-database-db" +DB_SECOND_DATABASE_RELATION_NAME = "second-database-db" +ROLES_FIRST_DATABASE_RELATION_NAME = "first-database-roles" + +MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME = "multiple-database-clusters" +ALIASED_MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME = "aliased-multiple-database-clusters" + +SECRET_REF_PREFIX = "secret-" + +NUM_DB = 3 +NUM_DUMMY_DB = 2 +NUM_OTHER_DB = 1 +NUM_APP = 2 + + +@pytest.mark.abort_on_fail +async def test_deploy_charms( + ops_test: OpsTest, + application_charm, + database_charm, + dummy_database_charm, + dp_libs_ubuntu_series, +): + """Deploy both charms (application and database) to use in the tests.""" + # Deploy both charms (2 units for each application to test that later they correctly + # set data in the relation application databag using only the leader unit). 
+ await asyncio.gather( + ops_test.model.deploy( + application_charm, + application_name=APPLICATION_APP_NAME, + num_units=NUM_APP, + series=dp_libs_ubuntu_series, + ), + ops_test.model.deploy( + database_charm, + resources={ + "database-image": DATABASE_APP_METADATA["resources"]["database-image"][ + "upstream-source" + ] + }, + application_name=DATABASE_APP_NAME, + num_units=NUM_DB, + series=dp_libs_ubuntu_series, + ), + ops_test.model.deploy( + dummy_database_charm, + resources={ + "database-image": DATABASE_DUMMY_APP_METADATA["resources"]["database-image"][ + "upstream-source" + ] + }, + application_name=DATABASE_DUMMY_APP_NAME, + num_units=NUM_DUMMY_DB, + series=dp_libs_ubuntu_series, + ), + ops_test.model.deploy( + database_charm, + resources={ + "database-image": DATABASE_APP_METADATA["resources"]["database-image"][ + "upstream-source" + ] + }, + application_name=ANOTHER_DATABASE_APP_NAME, + series=dp_libs_ubuntu_series, + ), + ) + + await asyncio.gather( + ops_test.model.wait_for_idle( + apps=[APPLICATION_APP_NAME], status="active", wait_for_exact_units=NUM_APP + ), + ops_test.model.wait_for_idle( + apps=[DATABASE_APP_NAME], status="active", wait_for_exact_units=NUM_DB + ), + ops_test.model.wait_for_idle( + apps=[DATABASE_DUMMY_APP_NAME], status="active", wait_for_exact_units=NUM_DUMMY_DB + ), + ops_test.model.wait_for_idle( + apps=[ANOTHER_DATABASE_APP_NAME], status="active", wait_for_exact_units=NUM_OTHER_DB + ), + ) + + +@pytest.mark.abort_on_fail +@pytest.mark.parametrize("component", ["app", "unit"]) +async def test_peer_relation_secrets(component, ops_test: OpsTest): + """Testing peer relation using the DataPeer class.""" + # Setting and verifying two secret fields + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + unit_name = f"{DATABASE_APP_NAME}/{leader_id}" + + # Generally we shouldn't have test decision based on pytest.mark.parametrize + # but I think this is a valid exception + owner = "database" if component == "app" else unit_name 
+ + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": component, "field": "monitor-password", "value": "blablabla"}, + ) + await action.wait() + + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": component, "field": "secret-field", "value": "blablabla2"}, + ) + await action.wait() + + secret = await get_secret_by_label(ops_test, f"database-peers.database.{component}", owner) + assert secret.get("monitor-password") == "blablabla" + assert secret.get("secret-field") == "blablabla2" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "monitor-password"} + ) + await action.wait() + assert action.results.get("value") == "blablabla" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "secret-field"} + ) + await action.wait() + assert action.results.get("value") == "blablabla2" + + # Setting and verifying a non-secret field + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": component, "field": "not-a-secret", "value": "plain text"}, + ) + await action.wait() + + secret = await get_secret_by_label(ops_test, f"database-peers.database.{component}", owner) + assert not secret.get("not-a-secret") + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "not-a-secret"} + ) + await action.wait() + assert action.results.get("value") == "plain text" + + # Deleting all fields + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-relation-field", **{"component": component, "field": "monitor-password"} + ) + await action.wait() + + secret = await get_secret_by_label(ops_test, f"database-peers.database.{component}", owner) + assert 
secret.get("secret-field") == "blablabla2" + assert secret.get("monitor-password") is None + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "monitor-password"} + ) + await action.wait() + assert not action.results.get("value") + + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-relation-field", **{"component": component, "field": "not-a-secret"} + ) + await action.wait() + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "not-a-secret"} + ) + await action.wait() + assert not action.results.get("value") + + assert not ( + await get_application_relation_data( + ops_test, DATABASE_APP_NAME, "database-peers", "not-a-secret", app_or_unit=component + ) + ) + + # Internal secret URI is not saved on the databag + assert not ( + await get_application_relation_data( + ops_test, DATABASE_APP_NAME, "database-peers", "internal-secret", app_or_unit=component + ) + ) + + # Cleanup + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-secret", **{"component": component} + ) + await action.wait() + + +@pytest.mark.abort_on_fail +@pytest.mark.parametrize("component", ["app", "unit"]) +async def test_peer_relation_secret_revisions(component, ops_test: OpsTest): + """Check that only a content change triggers the emission of a new revision.""" + # Given + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + unit_name = f"{DATABASE_APP_NAME}/{leader_id}" + owner = "database" if component == "app" else unit_name + + # When + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": component, "field": "secret-field", "value": "blablabla"}, + ) + await action.wait() + + original_secret_revision = await get_secret_revision_by_label( + ops_test, f"database-peers.database.{component}", owner + ) + + action = await 
ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": component, "field": "secret-field", "value": "blablabla2"}, + ) + await action.wait() + + changed_secret_revision = await get_secret_revision_by_label( + ops_test, f"database-peers.database.{component}", owner + ) + + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": component, "field": "secret-field", "value": "blablabla2"}, + ) + await action.wait() + + unchanged_secret_revision = await get_secret_revision_by_label( + ops_test, f"database-peers.database.{component}", owner + ) + + # Then + assert original_secret_revision + 1 == changed_secret_revision + assert changed_secret_revision == unchanged_secret_revision + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_with_juju_secrets") +@pytest.mark.parametrize("component", ["app", "unit"]) +async def test_peer_relation_set_secret(component, ops_test: OpsTest): + """Testing peer relation using the DataPeer class.""" + # Setting and verifying two secret fields + leader_id = await get_leader_id(ops_test, DATABASE_DUMMY_APP_NAME) + unit_name = f"{DATABASE_DUMMY_APP_NAME}/{leader_id}" + + # Generally we shouldn't have test decision based on pytest.mark.parametrize + # but I think this is a valid exception + owner = "dummy-database" if component == "app" else unit_name + + # Setting a new secret field dynamically + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-secret", + **{"component": component, "field": "new-field", "value": "blablabla"}, + ) + await action.wait() + + secret = await get_secret_by_label( + ops_test, f"database-peers.dummy-database.{component}", owner + ) + assert secret.get("new-field") == "blablabla" + + # Setting a new secret field dynamically in a new, dedicated secret + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-secret", + **{ + "component": component, + "field": 
"mygroup-field1", + "value": "blablabla3", + "group": "mygroup", + }, + ) + await action.wait() + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-secret", + **{ + "component": component, + "field": "mygroup-field2", + "value": "blablabla4", + "group": "mygroup", + }, + ) + await action.wait() + + secret = await get_secret_by_label( + ops_test, f"database-peers.dummy-database.{component}.mygroup", owner + ) + assert secret.get("mygroup-field1") == "blablabla3" + assert secret.get("mygroup-field2") == "blablabla4" + + # Getting the secret + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "new-field"} + ) + await action.wait() + assert action.results.get("value") == "blablabla" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "mygroup-field1@mygroup"} + ) + await action.wait() + assert action.results.get("value") == "blablabla3" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": component, "field": "mygroup-field2@mygroup"} + ) + await action.wait() + assert action.results.get("value") == "blablabla4" + + # Cleanup + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-secret", **{"component": component} + ) + await action.wait() + + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-secret", **{"component": component, "group": "mygroup"} + ) + await action.wait() + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_peer_relation_non_leader_unit_secrets(ops_test: OpsTest): + """Testing peer relation using the DataPeer class.""" + # Setting and verifying two secret fields + non_leader_unit_id = await get_non_leader_id(ops_test, DATABASE_APP_NAME) + unit_name = f"{DATABASE_APP_NAME}/{non_leader_unit_id}" + action = await 
ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": "unit", "field": "monitor-password", "value": "blablabla"}, + ) + await action.wait() + + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": "unit", "field": "secret-field", "value": "blablabla2"}, + ) + await action.wait() + + secret = await get_secret_by_label(ops_test, "database-peers.database.unit", unit_name) + assert secret.get("monitor-password") == "blablabla" + assert secret.get("secret-field") == "blablabla2" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": "unit", "field": "monitor-password"} + ) + await action.wait() + assert action.results.get("value") == "blablabla" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": "unit", "field": "secret-field"} + ) + await action.wait() + assert action.results.get("value") == "blablabla2" + + # Setting and verifying a non-secret field + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": "unit", "field": "not-a-secret", "value": "plain text"}, + ) + await action.wait() + + secret = await get_secret_by_label(ops_test, "database-peers.database.unit", unit_name) + assert not secret.get("not-a-secret") + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": "unit", "field": "not-a-secret"} + ) + await action.wait() + assert action.results.get("value") == "plain text" + + # Deleting all fields + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-relation-field", **{"component": "unit", "field": "monitor-password"} + ) + await action.wait() + + secret = await get_secret_by_label(ops_test, "database-peers.database.unit", unit_name) + assert secret.get("secret-field") == "blablabla2" + assert 
secret.get("monitor-password") == "#DELETED#" + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": "unit", "field": "monitor-password"} + ) + await action.wait() + assert not action.results.get("value") + + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-relation-field", **{"component": "unit", "field": "not-a-secret"} + ) + await action.wait() + + action = await ops_test.model.units.get(unit_name).run_action( + "get-peer-relation-field", **{"component": "unit", "field": "not-a-secret"} + ) + await action.wait() + assert not action.results.get("value") + + assert not ( + await get_application_relation_data( + ops_test, DATABASE_APP_NAME, "database-peers", "not-a-secret", app_or_unit="unit" + ) + ) + + # Internal secret URI is not saved on the databag + assert not ( + await get_application_relation_data( + ops_test, DATABASE_APP_NAME, "database-peers", "internal-secret", app_or_unit="unit" + ) + ) + + # Cleanup + action = await ops_test.model.units.get(unit_name).run_action( + "delete-peer-secret", **{"component": "unit"} + ) + await action.wait() + + +@pytest.mark.abort_on_fail +async def test_peer_relation_non_leader_can_read_app_data(ops_test: OpsTest): + """Testing peer relation using the DataPeer class.""" + # Setting and verifying two secret fields + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + unit_name = f"{DATABASE_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": "app", "field": "monitor-password", "value": "blablabla"}, + ) + await action.wait() + + action = await ops_test.model.units.get(unit_name).run_action( + "set-peer-relation-field", + **{"component": "app", "field": "not-a-secret", "value": "plain text"}, + ) + await action.wait() + + # Checking that non-leader unit can fetch any (i.e. 
also app) secret + non_leader_unit_id = await get_non_leader_id(ops_test, DATABASE_APP_NAME) + non_leader_unit_name = f"{DATABASE_APP_NAME}/{non_leader_unit_id}" + action = await ops_test.model.units.get(non_leader_unit_name).run_action( + "get-peer-relation-field", **{"component": "app", "field": "monitor-password"} + ) + await action.wait() + assert action.results.get("value") == "blablabla" + + action = await ops_test.model.units.get(non_leader_unit_name).run_action( + "get-peer-relation-field", **{"component": "app", "field": "not-a-secret"} + ) + await action.wait() + assert action.results.get("value") == "plain text" + + +@pytest.mark.abort_on_fail +async def test_other_peer_relation(ops_test: OpsTest): + """Testing peer relation using the DataPeer class.""" + # Setting and verifying two secret fields + component = "unit" + units = ops_test.model.applications[DATABASE_APP_NAME].units + for unit in units: + action = await unit.run_action( + "set-peer-relation-field", + **{"component": component, "field": "monitor-password", "value": "blablabla"}, + ) + await action.wait() + + action = await unit.run_action( + "set-peer-relation-field", + **{"component": component, "field": "non-secret-field", "value": "blablabla2"}, + ) + await action.wait() + + for main_unit in units: + action = await main_unit.run_action( + "get-other-peer-relation-field", **{"field": "monitor-password"} + ) + await action.wait() + + for unit in units: + if unit != main_unit: + assert action.results.get(unit.name.replace("/", "-")) == "blablabla" + + action = await main_unit.run_action( + "get-other-peer-relation-field", **{"field": "non-secret-field"} + ) + await action.wait() + + for unit in units: + if unit != main_unit: + assert action.results.get(unit.name.replace("/", "-")) == "blablabla2" + + +@pytest.mark.abort_on_fail +async def test_other_peer_relation_scale(ops_test: OpsTest): + """The scaling test is the 'continuation' of the previous (test_other_peer_relation()) test. 
+ + We assume data set up there. + """ + await ops_test.model.applications[DATABASE_APP_NAME].scale(scale_change=-1) + await ops_test.model.wait_for_idle( + apps=[DATABASE_APP_NAME], status="active", wait_for_exact_units=2 + ) + units = ops_test.model.applications[DATABASE_APP_NAME].units + + for main_unit in units: + action = await main_unit.run_action( + "get-other-peer-relation-field", **{"field": "monitor-password"} + ) + await action.wait() + + for unit in units: + if unit != main_unit: + assert action.results.get(unit.name.replace("/", "-")) == "blablabla" + + action = await main_unit.run_action( + "get-other-peer-relation-field", **{"field": "non-secret-field"} + ) + await action.wait() + + for unit in units: + if unit != main_unit: + assert action.results.get(unit.name.replace("/", "-")) == "blablabla2" + + await ops_test.model.applications[DATABASE_APP_NAME].add_units(count=1) + await ops_test.model.wait_for_idle( + apps=[DATABASE_APP_NAME], status="active", wait_for_exact_units=3 + ) + new_units = ops_test.model.applications[DATABASE_APP_NAME].units + unit = list(set(new_units) - set(units))[0] + + for main_unit in units: + action = await main_unit.run_action( + "get-other-peer-relation-field", **{"field": "monitor-password"} + ) + await action.wait() + + assert action.results.get(unit) is None + + action = await main_unit.run_action( + "get-other-peer-relation-field", **{"field": "non-secret-field"} + ) + await action.wait() + + assert action.results.get(unit) is None + + +@pytest.mark.abort_on_fail +async def test_database_relation_with_charm_libraries(ops_test: OpsTest): + """Test basic functionality of database relation interface.""" + # Relate the charms and wait for them exchanging some connection data. 
+ + pytest.first_database_relation = await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{DB_FIRST_DATABASE_RELATION_NAME}", DATABASE_APP_NAME + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # Get the connection string to connect to the database. + connection_string = await build_connection_string( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME + ) + + # Connect to the database. + with psycopg2.connect(connection_string) as connection, connection.cursor() as cursor: + # Check that it's possible to write and read data from the database that + # was created for the application. + connection.autocommit = True + cursor.execute("DROP TABLE IF EXISTS test;") + cursor.execute("CREATE TABLE test(data TEXT);") + cursor.execute("INSERT INTO test(data) VALUES('some data');") + cursor.execute("SELECT data FROM test;") + data = cursor.fetchone() + assert data[0] == "some data" + + # Check the version that the application received is the same on the database server. + cursor.execute("SELECT version();") + data = cursor.fetchone() + + # Get the version of the database and compare with the information that + # was retrieved directly from the database. + version = await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "version" + ) + assert version == data[0] + + +async def test_user_with_extra_roles(ops_test: OpsTest): + """Test superuser actions and the request for more permissions.""" + # Get the connection string to connect to the database. + connection_string = await build_connection_string( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME + ) + + # Connect to the database. + connection = psycopg2.connect(connection_string) + connection.autocommit = True + cursor = connection.cursor() + + # Test the user can create a database and another user. 
+ cursor.execute("CREATE DATABASE another_database;") + cursor.execute("CREATE USER another_user WITH ENCRYPTED PASSWORD 'test-password';") + + cursor.close() + connection.close() + + +@pytest.mark.abort_on_fail +async def test_postgresql_plugin(ops_test: OpsTest): + """Test that the application charm can check whether a plugin is enabled.""" + # Check that the plugin is disabled. + unit_name = f"{APPLICATION_APP_NAME}/0" + action = await ops_test.model.units.get(unit_name).run_action( + "get-plugin-status", **{"plugin": "citext"} + ) + await action.wait() + assert action.results.get("plugin-status") == "disabled" + + # Connect to the database and enable the plugin (PostgreSQL extension). + connection_string = await build_connection_string( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME + ) + with psycopg2.connect(connection_string) as connection, connection.cursor() as cursor: + connection.autocommit = True + cursor.execute("CREATE EXTENSION citext;") + connection.close() + + # Check that the plugin is enabled. + action = await ops_test.model.units.get(unit_name).run_action( + "get-plugin-status", **{"plugin": "citext"} + ) + await action.wait() + assert action.results.get("plugin-status") == "enabled" + + +async def test_two_applications_dont_share_the_same_relation_data( + ops_test: OpsTest, application_charm +): + """Test that two different application connect to the database with different credentials.""" + # Set some variables to use in this test. + another_application_app_name = "another-application" + all_app_names = [another_application_app_name] + all_app_names.extend(APP_NAMES) + + # Deploy another application. + await ops_test.model.deploy( + application_charm, application_name=another_application_app_name, series="jammy" + ) + await ops_test.model.wait_for_idle(apps=all_app_names, status="active") + + # Relate the new application with the database + # and wait for them exchanging some connection data. 
+ await ops_test.model.add_relation( + f"{another_application_app_name}:{DB_FIRST_DATABASE_RELATION_NAME}", DATABASE_APP_NAME + ) + await ops_test.model.wait_for_idle(apps=all_app_names, status="active") + + # Assert the two application have different relation (connection) data. + application_connection_string = await build_connection_string( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME + ) + another_application_connection_string = await build_connection_string( + ops_test, another_application_app_name, DB_FIRST_DATABASE_RELATION_NAME + ) + assert application_connection_string != another_application_connection_string + + +@pytest.mark.usefixtures("only_without_juju_secrets") +async def test_databag_usage_correct(ops_test: OpsTest, application_charm): + for field in ["username", "password"]: + assert await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, field + ) + + +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_secrets_usage_correct_secrets(ops_test: OpsTest, application_charm): + for field in ["username", "password", "uris"]: + assert ( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, field + ) + is None + ) + assert await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "secret-user" + ) + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_without_juju_secrets") +async def test_database_roles_relation_with_charm_libraries(ops_test: OpsTest): + """Test basic functionality of database-roles relation interface.""" + # Relate the charms and wait for them exchanging some connection data. 
+ + pytest.first_database_relation = await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{ROLES_FIRST_DATABASE_RELATION_NAME}", DATABASE_APP_NAME + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + entity_name = await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, ROLES_FIRST_DATABASE_RELATION_NAME, "entity-name" + ) + entity_pass = await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, ROLES_FIRST_DATABASE_RELATION_NAME, "entity-password" + ) + + assert entity_name is not None + assert entity_pass is not None + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_database_roles_relation_with_charm_libraries_secrets(ops_test: OpsTest): + """Test basic functionality of database-roles relation interface.""" + # Relate the charms and wait for them exchanging some connection data. + + pytest.first_database_relation = await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{ROLES_FIRST_DATABASE_RELATION_NAME}", DATABASE_APP_NAME + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + secret_uri = await get_application_relation_data( + ops_test, + APPLICATION_APP_NAME, + ROLES_FIRST_DATABASE_RELATION_NAME, + f"{SECRET_REF_PREFIX}entity", + ) + + secret_content = await get_juju_secret(ops_test, secret_uri) + entity_name = secret_content["entity-name"] + entity_pass = secret_content["entity-password"] + + assert entity_name is not None + assert entity_pass is not None + + +async def test_an_application_can_connect_to_multiple_database_clusters( + ops_test: OpsTest, database_charm +): + """Test that an application can connect to different clusters of the same database.""" + # Relate the application with both database clusters + # and wait for them exchanging some connection data. 
+ first_cluster_relation = await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME}", DATABASE_APP_NAME + ) + # This call enables the unit to be available in the relation changed event. + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + second_cluster_relation = await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME}", + ANOTHER_DATABASE_APP_NAME, + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # Retrieve the connection string to both database clusters using the relation aliases + # and assert they are different. + application_connection_string = await build_connection_string( + ops_test, + APPLICATION_APP_NAME, + MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME, + relation_id=first_cluster_relation.id, + ) + another_application_connection_string = await build_connection_string( + ops_test, + APPLICATION_APP_NAME, + MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME, + relation_id=second_cluster_relation.id, + ) + assert application_connection_string != another_application_connection_string + + +async def test_an_application_can_connect_to_multiple_aliased_database_clusters( + ops_test: OpsTest, database_charm +): + """Test that an application can connect to different clusters of the same database.""" + # Relate the application with both database clusters + # and wait for them exchanging some connection data. + await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{ALIASED_MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME}", + DATABASE_APP_NAME, + ) + # This call enables the unit to be available in the relation changed event. 
+ await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{ALIASED_MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME}", + ANOTHER_DATABASE_APP_NAME, + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # Retrieve the connection string to both database clusters using the relation aliases + # and assert they are different. + application_connection_string = await build_connection_string( + ops_test, + APPLICATION_APP_NAME, + ALIASED_MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME, + relation_alias="cluster1", + ) + another_application_connection_string = await build_connection_string( + ops_test, + APPLICATION_APP_NAME, + ALIASED_MULTIPLE_DATABASE_CLUSTERS_RELATION_NAME, + relation_alias="cluster2", + ) + assert application_connection_string != another_application_connection_string + + +async def test_an_application_can_request_multiple_databases(ops_test: OpsTest, application_charm): + """Test that an application can request additional databases using the same interface.""" + # Relate the charms using another relation and wait for them exchanging some connection data. + sleep(5) + pytest.second_database_relation = await ops_test.model.add_relation( + f"{APPLICATION_APP_NAME}:{DB_SECOND_DATABASE_RELATION_NAME}", DATABASE_APP_NAME + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # Get the connection strings to connect to both databases. + first_database_connection_string = await build_connection_string( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME + ) + second_database_connection_string = await build_connection_string( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME + ) + + # Assert the two application have different relation (connection) data. 
+ assert first_database_connection_string != second_database_connection_string + + +async def test_external_node_connectivity_field(ops_test: OpsTest, application_charm): + # Check that the flag is missing if not requested + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + "database", + "external-node-connectivity", + related_endpoint=DB_FIRST_DATABASE_RELATION_NAME, + ) + ) is None + + # Check that the second relation raises the flag + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + "database", + "external-node-connectivity", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + ) == "true" + + +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_provider_with_additional_secrets(ops_test: OpsTest, database_charm): + # Let's make sure that there was enough time for the relation initialization to communicate secrets + secret_fields = await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DATABASE_APP_NAME, + "requested-secrets", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + assert {"topsecret", "donttellanyone"} <= set(json.loads(secret_fields)) + + # Set secret + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( + "set-secret", **{"relation_id": pytest.second_database_relation.id, "field": "topsecret"} + ) + await action.wait() + + # Get secret original value + secret_uri = await get_application_relation_data( + ops_test, + APPLICATION_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + f"{SECRET_REF_PREFIX}extra", + ) + + secret_content = await get_juju_secret(ops_test, secret_uri) + topsecret1 = secret_content["topsecret"] + + # Re-set secret + action = await ops_test.model.units.get(leader_name).run_action( + "set-secret", **{"relation_id": pytest.second_database_relation.id, "field": "topsecret"} + ) + await action.wait() 
+ + # Get secret after change + secret_uri = await get_application_relation_data( + ops_test, + APPLICATION_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + f"{SECRET_REF_PREFIX}extra", + ) + + secret_content = await get_juju_secret(ops_test, secret_uri) + topsecret2 = secret_content["topsecret"] + + assert topsecret1 != topsecret2 + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_relation_secret_revisions(ops_test: OpsTest): + """Check that only a content change triggers the emission of a new revision.""" + # Given + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + owner = "database" + rel_id = pytest.second_database_relation.id + group_mapping = "extra" + + # When + action = await ops_test.model.units.get(leader_name).run_action( + "set-secret", **{"relation_id": rel_id, "field": "topsecret", "value": "initialvalue"} + ) + await action.wait() + + original_secret_revision = await get_secret_revision_by_label( + ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{group_mapping}.secret", owner + ) + + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "topsecret", + "value": "changedvalue", + }, + ) + await action.wait() + + changed_secret_revision = await get_secret_revision_by_label( + ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{group_mapping}.secret", owner + ) + + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "topsecret", + "value": "changedvalue", + }, + ) + await action.wait() + + unchanged_secret_revision = await get_secret_revision_by_label( + ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{group_mapping}.secret", owner + ) + + # Then + assert original_secret_revision + 1 == changed_secret_revision + assert changed_secret_revision == 
unchanged_secret_revision + + +@pytest.mark.parametrize("field,value", [("new_field", "blah"), ("tls", "True")]) +@pytest.mark.usefixtures("only_without_juju_secrets") +async def test_provider_get_set_delete_fields(field, value, ops_test: OpsTest): + # Add normal field + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + "value": value, + }, + ) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, field + ) + == value + ) + + # Check all application units can read remote relation data + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + action = await unit.run_action( + "get-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + }, + ) + await action.wait() + assert action.results.get("value") == value + + # Check if database can retrieve self-side relation data + action = await ops_test.model.units.get(leader_name).run_action( + "get-relation-self-side-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + "value": value, + }, + ) + await action.wait() + assert action.results.get("value") == value + + # Delete normal field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": field}, + ) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, field + ) + is None + ) + + # Delete non-existent field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": "doesnt_exist"}, 
+ ) + await action.wait() + # Juju2 syntax + assert int(action.results["Code"]) == 0 + assert await check_logs( + ops_test, + strings=["Non-existing field 'doesnt_exist' was attempted to be removed from the databag"], + ) + + +@pytest.mark.log_errors_allowed( + "Non-existing field 'doesnt_exist' was attempted to be removed from the databag" +) +@pytest.mark.parametrize( + "field,value,relation_field", + [ + ("new_field", "blah", "new_field"), + ("tls", "True", "secret-tls"), + ], +) +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_provider_get_set_delete_fields_secrets( + field, value, relation_field, ops_test: OpsTest +): + # Add field + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + "value": value, + }, + ) + await action.wait() + + assert await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, relation_field + ) + + # Check all application units can read remote relation data + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + action = await unit.run_action( + "get-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + }, + ) + await action.wait() + assert action.results.get("value") == value + + # Check if database can retrieve self-side relation data + action = await ops_test.model.units.get(leader_name).run_action( + "get-relation-self-side-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + "value": value, + }, + ) + await action.wait() + assert action.results.get("value") == value + + # Delete field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": field}, + 
) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, relation_field + ) + is None + ) + + # Check that the field is deleted + action = await ops_test.model.units.get(leader_name).run_action( + "get-relation-self-side-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + }, + ) + await action.wait() + assert not action.results.get("value") + + # Delete non-existent normal and secret field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": "doesnt_exist"}, + ) + await action.wait() + assert action.results["return-code"] == 0 + + +@pytest.mark.abort_on_fail +@pytest.mark.log_errors_allowed("Can't delete secret for relation") +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): + """The 'tls' field, that was removed in the previous test has its secret removed.""" + # Add field + field = "tls" + value = "True" + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": field, + "value": value, + }, + ) + await action.wait() + + # Get TLS secret pointer + secret_uri = await get_application_relation_data( + ops_test, + APPLICATION_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + f"{SECRET_REF_PREFIX}{field}", + ) + + # Delete field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": field}, + ) + await action.wait() + assert not ( + await check_logs( + ops_test, + strings=["Non-existing field 'tls' was attempted to be removed from the databag"], + ) + ) + 
assert not (await check_logs(ops_test, strings=["Can't delete secret for relation"])) + + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": field}, + ) + await action.wait() + assert await check_logs( + ops_test, strings=["Non-existing field 'tls' was attempted to be removed from the databag"] + ) + assert await check_logs(ops_test, strings=["Can't delete secret for relation"]) + + assert ( + await get_application_relation_data( + ops_test, + APPLICATION_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + f"{SECRET_REF_PREFIX}{field}", + ) + is None + ) + + secrets = await list_juju_secrets(ops_test) + secret_xid = secret_uri.split("/")[-1] + assert secret_xid not in secrets + + +async def test_requires_get_set_delete_fields(ops_test: OpsTest): + # Add normal field + leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) + leader_name = f"{APPLICATION_APP_NAME}/{leader_id}" + + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "new_field", + "value": "blah", + }, + ) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + "new_field", + related_endpoint="second-database-db", + ) + == "blah" + ) + + # Check all application units can read remote relation data + for unit in ops_test.model.applications[DATABASE_APP_NAME].units: + action = await unit.run_action( + "get-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "new_field", + }, + ) + await action.wait() + assert action.results.get("value") == "blah" + + # Check if database can retrieve self-side relation data + action = await ops_test.model.units.get(leader_name).run_action( + "get-relation-self-side-field", + **{ + "relation_id": pytest.second_database_relation.id, + 
"field": "new_field", + "value": "blah", + }, + ) + await action.wait() + assert action.results.get("value") == "blah" + + # Delete normal field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": "new_field"}, + ) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + "new_field", + related_endpoint="second-database-db", + ) + is None + ) + + +@pytest.mark.log_errors_allowed( + "This operation (update_relation_data()) can only be performed by the leader unit" +) +@pytest.mark.log_errors_allowed( + "This operation (delete_relation_data()) can only be performed by the leader unit" +) +async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): + leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) + leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "new_field", + "value": "blah", + }, + ) + await action.wait() + + unit_id = await get_non_leader_id(ops_test, DATABASE_APP_NAME) + unit_name = f"{DATABASE_APP_NAME}/{unit_id}" + action = await ops_test.model.units.get(unit_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "new_field2", + "value": "blah2", + }, + ) + await action.wait() + assert await check_logs( + ops_test, + strings=[ + "This operation (update_relation_data()) can only be performed by the leader unit" + ], + ) + + assert ( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "new_field2" + ) + is None + ) + + action = await ops_test.model.units.get(unit_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": 
"new_field"}, + ) + await action.wait() + assert await check_logs( + ops_test, + strings=[ + "This operation (delete_relation_data()) can only be performed by the leader unit" + ], + ) + + assert ( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "new_field" + ) + == "blah" + ) + + +async def test_requires_set_delete_fields(ops_test: OpsTest): + # Add field + leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) + leader_name = f"{APPLICATION_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "new_field_req", + "value": "blah-req", + }, + ) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DATABASE_APP_NAME, + "new_field_req", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + == "blah-req" + ) + + # Delete field + action = await ops_test.model.units.get(leader_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": "new_field_req"}, + ) + await action.wait() + + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DATABASE_APP_NAME, + "new_field_req", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + is None + ) + + +@pytest.mark.log_errors_allowed( + "This operation (update_relation_data()) can only be performed by the leader unit" +) +@pytest.mark.log_errors_allowed( + "This operation (delete_relation_data()) can only be performed by the leader unit" +) +async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): + leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) + leader_name = f"{APPLICATION_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": 
"new_field-req", + "value": "blah-req", + }, + ) + await action.wait() + + unit_id = await get_non_leader_id(ops_test, APPLICATION_APP_NAME) + unit_name = f"{APPLICATION_APP_NAME}/{unit_id}" + action = await ops_test.model.units.get(unit_name).run_action( + "set-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "new_field2-req", + "value": "blah2-req", + }, + ) + await action.wait() + assert await check_logs( + ops_test, + strings=[ + "This operation (update_relation_data()) can only be performed by the leader unit" + ], + ) + + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DATABASE_APP_NAME, + "new_field2-req", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + is None + ) + + action = await ops_test.model.units.get(unit_name).run_action( + "delete-relation-field", + **{"relation_id": pytest.second_database_relation.id, "field": "new_field-req"}, + ) + await action.wait() + assert await check_logs( + ops_test, + strings=[ + "This operation (delete_relation_data()) can only be performed by the leader unit" + ], + ) + + assert ( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DATABASE_APP_NAME, + "new_field-req", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + == "blah-req" + ) + + +async def test_scaling_requires_can_access_shared_secrest(ops_test): + """When scaling up the application, new units should have access to relation secrets.""" + await ops_test.model.applications[APPLICATION_APP_NAME].scale(3) + + await ops_test.model.wait_for_idle( + apps=[APPLICATION_APP_NAME], status="active", timeout=(15 * 60), wait_for_exact_units=3 + ) + + old_unit_name = f"{APPLICATION_APP_NAME}/1" + new_unit_name = f"{APPLICATION_APP_NAME}/2" + + action = await ops_test.model.units.get(old_unit_name).run_action( + "get-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "password", + }, + ) + await action.wait() + orig_password 
= action.results.get("value") + + action = await ops_test.model.units.get(new_unit_name).run_action( + "get-relation-field", + **{ + "relation_id": pytest.second_database_relation.id, + "field": "password", + }, + ) + await action.wait() + new_password = action.results.get("value") + assert new_password == orig_password diff --git a/tests/v1/integration/test_kafka_charm.py b/tests/v1/integration/test_kafka_charm.py new file mode 100644 index 00000000..47f8e650 --- /dev/null +++ b/tests/v1/integration/test_kafka_charm.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. +import asyncio +import logging +import subprocess + +import pytest +from pytest_operator.plugin import OpsTest + +from .helpers import get_application_relation_data, get_juju_secret, json + +logger = logging.getLogger(__name__) + +APPLICATION_APP_NAME = "requirer-app" +APPLICATION_APP_NAME_SPLIT = "requirer-app-split" +KAFKA_APP_NAME = "kafka" +APP_NAMES = [APPLICATION_APP_NAME, APPLICATION_APP_NAME_SPLIT, KAFKA_APP_NAME] +ROLES_RELATION_NAME = "kafka-client-roles" +TOPIC_RELATION_NAME = "kafka-client-topic" +TOPIC_RELATION_NAME_SPLIT_PATTERN = "kafka-split-pattern-client" + +PROV_SECRET_PREFIX = "secret-" + + +@pytest.mark.abort_on_fail +@pytest.mark.log_errors_allowed +@pytest.mark.skip_if_deployed +async def test_deploy_charms(ops_test: OpsTest, application_charm, kafka_charm): + """Deploy both charms (application and the testing kafka app) to use in the tests.""" + # Deploy both charms (1 unit for each application to test that later they correctly + # set data in the relation application databag using only the leader unit). 
+ await asyncio.gather( + ops_test.model.deploy( + application_charm, application_name=APPLICATION_APP_NAME, num_units=1, series="jammy" + ), + ops_test.model.deploy( + application_charm, + application_name=APPLICATION_APP_NAME_SPLIT, + num_units=1, + series="jammy", + ), + ops_test.model.deploy( + kafka_charm, application_name=KAFKA_APP_NAME, num_units=1, series="jammy" + ), + ) + await ops_test.model.wait_for_idle( + apps=[KAFKA_APP_NAME], status="active", wait_for_exact_units=1 + ) + await ops_test.model.wait_for_idle( + apps=[APPLICATION_APP_NAME], status="active", wait_for_exact_units=1 + ) + await ops_test.model.wait_for_idle( + apps=[APPLICATION_APP_NAME_SPLIT], status="active", wait_for_exact_units=1 + ) + + +@pytest.mark.abort_on_fail +async def test_kafka_relation_with_charm_libraries_secrets(ops_test: OpsTest): + """Test basic functionality of kafka relation interface.""" + # Relate the charms and wait for them exchanging some connection data. + await ops_test.model.add_relation( + KAFKA_APP_NAME, f"{APPLICATION_APP_NAME}:{TOPIC_RELATION_NAME}" + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # check unit message to check if the topic_created_event is triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "kafka_topic_created" + # check if the topic was granted + for unit in ops_test.model.applications[KAFKA_APP_NAME].units: + assert "granted" in unit.workload_status_message + + # Get the requests + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, TOPIC_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + secret_uri = request[f"{PROV_SECRET_PREFIX}user"] + secret_data = await get_juju_secret(ops_test, secret_uri) + username = secret_data["username"] + password = secret_data["password"] + bootstrap_server = request["endpoints"] + consumer_group_prefix = request["consumer-group-prefix"] + topic = 
request["resource"] + + assert username == "admin" + assert password == "password" + assert bootstrap_server == "host1:port,host2:port" + assert consumer_group_prefix == "test-prefix" + assert topic == "test-topic" + + +async def test_kafka_bootstrap_server_changed(ops_test: OpsTest): + """Test that the bootstrap server changed event is correctly triggered.""" + app_unit = ops_test.model.applications[APPLICATION_APP_NAME].units[0] + kafka_unit = ops_test.model.applications[KAFKA_APP_NAME].units[0] + # set new bootstrap + parameters = {"bootstrap-server": "host1:port,host2:port,host3:port"} + action = await kafka_unit.run_action(action_name="sync-bootstrap-server", **parameters) + result = await action.wait() + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + assert result.results["bootstrap-server"] == "host1:port,host2:port,host3:port" + + # check that the new bootstrap-server is in the databag + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, TOPIC_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + bootstrap_server = request["endpoints"] + assert bootstrap_server == "host1:port,host2:port,host3:port" + + # check that the bootstrap_server_changed event is triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "kafka_bootstrap_server_changed" + + # reset unit message + action = await app_unit.run_action(action_name="reset-unit-status") + result = await action.wait() + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + # check if the message is empty + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "" + # configure the same bootstrap-server + action = await kafka_unit.run_action(action_name="sync-bootstrap-server", **parameters) + result = await action.wait() + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + 
assert result.results["bootstrap-server"] == "host1:port,host2:port,host3:port" + # check that the new bootstrap-server is in the databag + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, TOPIC_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + bootstrap_server = request["endpoints"] + assert bootstrap_server == "host1:port,host2:port,host3:port" + # check the bootstrap_server_changed event is NOT triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "" + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_kafka_mtls(ops_test: OpsTest): + """Tests mtls-cert is set as a secret from the requirer side and proper event triggered on provider side.""" + # Relate the charms and wait for them exchanging some connection data. + await ops_test.model.add_relation( + KAFKA_APP_NAME, f"{APPLICATION_APP_NAME_SPLIT}:{TOPIC_RELATION_NAME_SPLIT_PATTERN}" + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + app_unit = ops_test.model.applications[APPLICATION_APP_NAME_SPLIT].units[0] + action = await app_unit.run_action(action_name="set-mtls-cert") + _ = await action.wait() + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + requests = json.loads( + await get_application_relation_data( + ops_test, + KAFKA_APP_NAME, + TOPIC_RELATION_NAME, + "requests", + related_endpoint=TOPIC_RELATION_NAME_SPLIT_PATTERN, + ) + or "[]" + ) + request = requests[0] + secret_uri = request[f"{PROV_SECRET_PREFIX}mtls"] + secret_content = await get_juju_secret(ops_test, secret_uri) + mtls_cert = secret_content["mtls-cert"] + + kafka_unit = ops_test.model.applications[KAFKA_APP_NAME].units[0] + provider_cert_path = kafka_unit.workload_status_message + unit_cert = subprocess.check_output( + f"juju ssh {kafka_unit.name} cat {provider_cert_path}", shell=True, universal_newlines=True + ) + 
+ assert unit_cert.strip() == mtls_cert.strip() + + +@pytest.mark.abort_on_fail +async def test_kafka_roles_relation_with_charm_libraries_secrets(ops_test: OpsTest): + """Test basic functionality of kafka-roles relation interface.""" + # Relate the charms and wait for them exchanging some connection data. + await ops_test.model.add_relation( + KAFKA_APP_NAME, f"{APPLICATION_APP_NAME}:{ROLES_RELATION_NAME}" + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # check unit message to check if the topic_created_event is triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "kafka_entity_created" + # check if the topic was granted + for unit in ops_test.model.applications[KAFKA_APP_NAME].units: + assert "created" in unit.workload_status_message + + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, ROLES_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + + secret_uri = request[f"{PROV_SECRET_PREFIX}entity"] + + secret_content = await get_juju_secret(ops_test, secret_uri) + entity_name = secret_content["entity-name"] + entity_pass = secret_content["entity-password"] + + assert entity_name == "admin" + assert entity_pass == "password" diff --git a/tests/v1/integration/test_kafka_connect_charm.py b/tests/v1/integration/test_kafka_connect_charm.py new file mode 100644 index 00000000..e39a7e6e --- /dev/null +++ b/tests/v1/integration/test_kafka_connect_charm.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+import asyncio +import logging +from pathlib import Path + +import pytest +from pytest_operator.plugin import OpsTest + +from .helpers import get_application_relation_data, get_juju_secret, json + +logger = logging.getLogger(__name__) + +REQUIRER_APP_NAME = "requirer-app" +PROVIDER_APP_NAME = "kafka-connect" +APP_NAMES = [REQUIRER_APP_NAME, PROVIDER_APP_NAME] +SOURCE_REL = "connect-source" +SINK_REL = "connect-sink" +PROV_SECRET_PREFIX = "secret-" + + +@pytest.mark.abort_on_fail +@pytest.mark.skip_if_deployed +@pytest.mark.log_errors_allowed( + 'ERROR juju.worker.meterstatus error running "meter-status-changed": charm missing from disk' +) +async def test_deploy_charms( + ops_test: OpsTest, application_charm: Path, kafka_connect_charm: Path +): + """Test deployment of Kafka Connect provider and requirer toy charms.""" + await asyncio.gather( + ops_test.model.deploy( + application_charm, application_name=REQUIRER_APP_NAME, num_units=1, series="jammy" + ), + ops_test.model.deploy( + kafka_connect_charm, application_name=PROVIDER_APP_NAME, num_units=1, series="jammy" + ), + ) + + await ops_test.model.wait_for_idle( + apps=APP_NAMES, + idle_period=30, + timeout=1800, + status="active", + ) + + assert ops_test.model.applications[REQUIRER_APP_NAME].status == "active" + assert ops_test.model.applications[PROVIDER_APP_NAME].status == "active" + + +@pytest.mark.abort_on_fail +async def test_connect_client_relation_with_charm_libraries( + ops_test: OpsTest, request: pytest.FixtureRequest +): + """Test basic functionality of Kafka Connect client relation interface.""" + # Relate the charms and wait for them exchanging some connection data. 
+ await ops_test.model.add_relation(PROVIDER_APP_NAME, f"{REQUIRER_APP_NAME}:{SOURCE_REL}") + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # check unit message on requirer side + for unit in ops_test.model.applications[REQUIRER_APP_NAME].units: + assert unit.workload_status_message == "connect_integration_created" + # check unit message on provider side + for unit in ops_test.model.applications[PROVIDER_APP_NAME].units: + assert "successful" in unit.workload_status_message + + requests = json.loads( + await get_application_relation_data(ops_test, REQUIRER_APP_NAME, SOURCE_REL, "requests") + or "[]" + ) + _request = requests[0] + secret_uri = _request[f"{PROV_SECRET_PREFIX}user"] + + secret_content = await get_juju_secret(ops_test, secret_uri) + username = secret_content["username"] + password = secret_content["password"] + + endpoints = _request["endpoints"] + + request.config.cache.set("initial_password", password) + request.config.cache.set("initial_endpoints", endpoints) + + assert username == "integrator" + assert password == "password" + assert endpoints == "http://worker1:8083,http://worker2:8083" + + +@pytest.mark.abort_on_fail +async def test_kafka_connect_credentials_change(ops_test: OpsTest, request: pytest.FixtureRequest): + """Test Kafka Connect credentials change functionality.""" + # Get current password + password = request.config.cache.get("initial_password", "") + assert password == "password" + + # Change connect password + action = ( + await ops_test.model.applications[PROVIDER_APP_NAME] + .units[0] + .run_action("sync", key="password", value="newpass") + ) + await action.wait() + + await ops_test.model.wait_for_idle( + apps=APP_NAMES, + idle_period=20, + timeout=600, + status="active", + ) + + secret_uri = ( + await get_application_relation_data( + ops_test, REQUIRER_APP_NAME, SOURCE_REL, f"{PROV_SECRET_PREFIX}user" + ) + or "" + ) + + requests = json.loads( + await get_application_relation_data(ops_test, 
REQUIRER_APP_NAME, SOURCE_REL, "requests") + or "[]" + ) + _request = requests[0] + secret_uri = _request[f"{PROV_SECRET_PREFIX}user"] + + secret_content = await get_juju_secret(ops_test, secret_uri) + new_password = secret_content["password"] + + assert password != new_password + assert new_password == "newpass" + + +@pytest.mark.abort_on_fail +@pytest.mark.usefixtures("only_with_juju_secrets") +async def test_kafka_connect_endpoints_change(ops_test: OpsTest, request: pytest.FixtureRequest): + """Test Kafka Connect endpoints change functionality.""" + # Get current endpoints + endpoints = request.config.cache.get("initial_endpoints", "") + assert endpoints == "http://worker1:8083,http://worker2:8083" + + # Change connect endpoints + action = ( + await ops_test.model.applications[PROVIDER_APP_NAME] + .units[0] + .run_action("sync", key="endpoints", value="http://worker1:8083") + ) + await action.wait() + + await ops_test.model.wait_for_idle( + apps=APP_NAMES, + idle_period=20, + timeout=600, + status="active", + ) + + requests = json.loads( + await get_application_relation_data(ops_test, REQUIRER_APP_NAME, SOURCE_REL, "requests") + or "[]" + ) + _request = requests[0] + new_endpoints = _request["endpoints"] + + assert endpoints != new_endpoints + assert new_endpoints == "http://worker1:8083" diff --git a/tests/v1/integration/test_opensearch_charm.py b/tests/v1/integration/test_opensearch_charm.py new file mode 100644 index 00000000..c13a27e4 --- /dev/null +++ b/tests/v1/integration/test_opensearch_charm.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python3 +# Copyright 2022 Canonical Ltd. +# See LICENSE file for licensing details. 
+import asyncio +import logging + +import pytest +from pytest_operator.plugin import OpsTest + +from .helpers import get_application_relation_data, get_juju_secret, json + +logger = logging.getLogger(__name__) + +APPLICATION_APP_NAME = "requirer-app" +OPENSEARCH_APP_NAME = "opensearch-test" +APP_NAMES = [APPLICATION_APP_NAME, OPENSEARCH_APP_NAME] +INDEX_RELATION_NAME = "opensearch-client-index" +ROLES_RELATION_NAME = "opensearch-client-roles" + +PROV_SECRET_PREFIX = "secret-" + + +@pytest.mark.abort_on_fail +async def test_deploy_charms(ops_test: OpsTest, application_charm, opensearch_charm): + """Deploy both charms (application and the testing opensearch app) to use in the tests.""" + # Deploy both charms (1 unit for each application to test that later they correctly + # set data in the relation application databag using only the leader unit). + await asyncio.gather( + ops_test.model.deploy( + application_charm, application_name=APPLICATION_APP_NAME, num_units=1, series="jammy" + ), + ops_test.model.deploy( + opensearch_charm, application_name=OPENSEARCH_APP_NAME, num_units=1, series="jammy" + ), + ) + await asyncio.gather( + ops_test.model.wait_for_idle( + apps=[OPENSEARCH_APP_NAME], status="active", wait_for_exact_units=1 + ), + ops_test.model.wait_for_idle( + apps=[APPLICATION_APP_NAME], status="active", wait_for_exact_units=1 + ), + ) + + +@pytest.mark.abort_on_fail +async def test_opensearch_relation_with_charm_libraries_secrets(ops_test: OpsTest): + """Test basic functionality of opensearch relation interface.""" + # Relate the charms and wait for them exchanging some connection data. 
+ await ops_test.model.add_relation( + OPENSEARCH_APP_NAME, f"{APPLICATION_APP_NAME}:{INDEX_RELATION_NAME}" + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # check unit message to check if the index_created_event is triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "opensearch_index_created" + # check if index access is granted + for unit in ops_test.model.applications[OPENSEARCH_APP_NAME].units: + assert "granted" in unit.workload_status_message + + # Get the requests + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, INDEX_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + secret_uri = request[f"{PROV_SECRET_PREFIX}user"] + secret_data = await get_juju_secret(ops_test, secret_uri) + username = secret_data["username"] + password = secret_data["password"] + endpoints = request["endpoints"] + index = request["resource"] + + assert username == "admin" + assert password == "password" + assert endpoints == "host1:port,host2:port" + assert index == "test-index" + + +@pytest.mark.abort_on_fail +async def test_opensearch_relation_secret_changed(ops_test: OpsTest): + """Test basic functionality of opensearch relation interface.""" + # Get current password + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, INDEX_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + secret_uri = request[f"{PROV_SECRET_PREFIX}user"] + + secret_content = await get_juju_secret(ops_test, secret_uri) + password = secret_content["password"] + # Change admin password + unit_name = f"{OPENSEARCH_APP_NAME}/0" + action = await ops_test.model.units.get(unit_name).run_action("change-admin-password") + await action.wait() + + secret_content = await get_juju_secret(ops_test, secret_uri) + new_password = secret_content["password"] + assert password != new_password + + # check 
unit message to check if the index_created_event is triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "opensearch_authentication_updated" + + +@pytest.mark.abort_on_fail +async def test_opensearch_roles_relation_with_charm_libraries_secrets(ops_test: OpsTest): + """Test basic functionality of opensearch relation interface.""" + # Relate the charms and wait for them exchanging some connection data. + await ops_test.model.add_relation( + OPENSEARCH_APP_NAME, f"{APPLICATION_APP_NAME}:{ROLES_RELATION_NAME}" + ) + await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") + + # check unit message to check if the index_entity_created_event is triggered + for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: + assert unit.workload_status_message == "opensearch_entity_created" + # check if index role is created + for unit in ops_test.model.applications[OPENSEARCH_APP_NAME].units: + assert "created" in unit.workload_status_message + + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, ROLES_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + secret_uri = request[f"{PROV_SECRET_PREFIX}entity"] + + secret_content = await get_juju_secret(ops_test, secret_uri) + entity_name = secret_content["entity-name"] + entity_pass = secret_content["entity-password"] + + assert entity_name == "admin" + assert entity_pass == "password" diff --git a/tests/v1/unit/test_data_interfaces.py b/tests/v1/unit/test_data_interfaces.py index d0d41992..2a36fd6f 100644 --- a/tests/v1/unit/test_data_interfaces.py +++ b/tests/v1/unit/test_data_interfaces.py @@ -249,12 +249,7 @@ def test_diff(self): # Use a variable to easily update the relation changed event data during the test. 
data_model = ResourceProviderModel.model_validate(
-            {
-                "resource": "blah",
-                "request-id": "",
-                "username": "test-username",
-                "password": "test-password",
-            }
+            {"resource": "blah", "request-id": "", "secret-user": "secret://deadbeef"}
         )
         # Test with new data added to the relation databag.
         result = self.harness.charm.provider.compute_diff(relation, request=data_model, store=True)
@@ -1056,12 +1051,7 @@ def test_diff(self):
         relation = self.harness.model.get_relation(self.relation_name, rel_id)

         data_model = ResourceProviderModel.model_validate(
-            {
-                "resource": "blah",
-                "request-id": "",
-                "username": "test-username",
-                "password": "test-password",
-            }
+            {"resource": "blah", "request-id": "", "secret-user": "secret://deadbeef"}
         )

         # Test with new data added to the relation databag.
diff --git a/tox.ini b/tox.ini
index f963244a..27df41de 100644
--- a/tox.ini
+++ b/tox.ini
@@ -59,7 +59,9 @@ commands =
     --skip {tox_root}/poetry.lock \
     --skip {tox_root}/tests/integration/data \
     --ignore-words-list "assertIn"
-    codespell {[vars]lib_path}
+
+    lint-v0: codespell {[vars]lib_path}/v0
+    lint-v1: codespell {[vars]lib_path}/v1

    ruff check {[vars]all_path}
    black --check --diff {[vars]all_path}
@@ -95,6 +97,19 @@ deps =
 commands =
     pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v0/integration/test_charm.py

+[testenv:integration-db-v1]
+description = Run database integration tests
+deps =
+    psycopg2-binary
+    pytest<8.2.0
+    juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0}
+    pytest-operator<0.43
+    pytest-mock
+    websockets{env:WEBSOCKETS_VERSION_SPECIFIER:}
+    -r {[vars]reqs_path}/v1/requirements.txt
+commands =
+    pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v1/integration/test_charm.py
+
 [testenv:integration-upgrade-databag-v0]
 description = Run database integration tests
 deps =
@@ -136,6 +151,20 @@ deps =
 commands =
     pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v0/integration/test_kafka_charm.py
+[testenv:integration-kafka-v1]
+description = Run Kafka integration tests
+deps =
+    psycopg2-binary
+    pytest<8.2.0
+    juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0}
+    pytest-operator<0.43
+    pytest-mock
+    websockets{env:WEBSOCKETS_VERSION_SPECIFIER:}
+    -r {[vars]reqs_path}/v1/requirements.txt
+commands =
+    pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v1/integration/test_kafka_charm.py
+
+
 [testenv:integration-s3-v0]
 description = Run S3 integration tests
 deps =
@@ -162,6 +191,19 @@ deps =
 commands =
     pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v0/integration/test_opensearch_charm.py
 
+[testenv:integration-opensearch-v1]
+description = Run opensearch integration tests
+deps =
+    psycopg2-binary
+    pytest<8.2.0
+    juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0}
+    pytest-operator<0.43
+    pytest-mock
+    websockets{env:WEBSOCKETS_VERSION_SPECIFIER:}
+    -r {[vars]reqs_path}/v1/requirements.txt
+commands =
+    pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v1/integration/test_opensearch_charm.py
+
 [testenv:integration-secrets-v0]
 description = Run secrets integration tests
 deps =
@@ -187,3 +229,26 @@ deps =
 commands =
     pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v0/integration/test_kafka_connect_charm.py
 
+[testenv:integration-kafka-connect-v1]
+description = Run Kafka Connect integration tests
+deps =
+    pytest<8.2.0
+    juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0}
+    pytest-operator<0.43
+    pytest-mock
+    websockets{env:WEBSOCKETS_VERSION_SPECIFIER:}
+    -r {[vars]reqs_path}/v1/requirements.txt
+commands =
+    pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v1/integration/test_kafka_connect_charm.py
+
+[testenv:integration-backward-compatibility-v1]
+description = Run backward compatibility integration tests
+deps =
+    pytest<8.2.0
+    juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0}
+    pytest-operator<0.43
+    pytest-mock
+    
websockets{env:WEBSOCKETS_VERSION_SPECIFIER:} + -r {[vars]reqs_path}/v0/requirements.txt +commands = + pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/v1/integration/test_backward_compatibility_charm.py From ef78f52604950d0001b7885f3e6040d3d563b75c Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Wed, 17 Sep 2025 13:39:40 +0200 Subject: [PATCH 03/34] feat: DPV1 --- .github/workflows/ci.yaml | 2 - .gitignore | 26 +- .../data_platform_libs/v1/data_interfaces.py | 172 +- .../lib/charms/data_platform_libs/v0/s3.py | 792 +++++ .../lib/charms/data_platform_libs/v0/s3.py | 792 +++++ tests/v1/conftest.py | 10 - .../application-charm/charmcraft.yaml | 18 +- .../data_platform_libs/v1/data_interfaces.py | 2753 +++++++++++++++++ .../integration/application-charm/poetry.lock | 34 +- .../application-charm/src/charm.py | 36 +- .../charmcraft.yaml | 18 +- .../database-charm/charmcraft.yaml | 18 +- .../integration/database-charm/src/charm.py | 70 +- .../dummy-database-charm/charmcraft.yaml | 18 +- .../data_platform_libs/v1/data_interfaces.py | 2753 +++++++++++++++++ .../dummy-database-charm/src/charm.py | 30 +- .../integration/kafka-charm/charmcraft.yaml | 18 +- .../data_platform_libs/v1/data_interfaces.py | 2753 +++++++++++++++++ .../kafka-connect-charm/charmcraft.yaml | 18 +- .../data_platform_libs/v1/data_interfaces.py | 2753 +++++++++++++++++ .../opensearch-charm/charmcraft.yaml | 18 +- .../data_platform_libs/v1/data_interfaces.py | 2753 +++++++++++++++++ tests/v1/integration/test_charm.py | 291 +- tests/v1/integration/test_kafka_charm.py | 4 +- tests/v1/unit/test_data_interfaces.py | 46 +- 25 files changed, 15794 insertions(+), 402 deletions(-) create mode 100644 tests/v0/integration/application-s3-charm/lib/charms/data_platform_libs/v0/s3.py create mode 100644 tests/v0/integration/s3-charm/lib/charms/data_platform_libs/v0/s3.py create mode 100644 tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py create mode 
100644 tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py create mode 100644 tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py create mode 100644 tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py create mode 100644 tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 850972b6..ae7f1368 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -167,8 +167,6 @@ jobs: # Update whenever charmcraft.yaml is changed - series: jammy bases-index: 0 - - series: noble - bases-index: 1 tox-environments: - integration-db-v1 - integration-opensearch-v1 diff --git a/.gitignore b/.gitignore index 2b3c9682..65392fef 100644 --- a/.gitignore +++ b/.gitignore @@ -5,12 +5,24 @@ build/ .coverage __pycache__/ *.py[cod] -tests/integration/application-charm/lib/charms/data_platform_libs/v0/database_requires.py -tests/integration/database-charm/lib/charms/data_platform_libs/v0/database_provides.py -tests/integration/application-charm/lib/charms/data_platform_libs/v0/data_interfaces.py -tests/integration/application-s3-charm/lib/charms/data_platform_libs/v0/s3.py -tests/integration/database-charm/lib/charms/data_platform_libs/v0/data_interfaces.py -tests/integration/kafka-charm/lib/charms/data_platform_libs/v0/data_interfaces.py -tests/integration/s3-charm/lib/charms/data_platform_libs/v0/s3.py' +tests/v0/integration/application-charm/lib/charms/data_platform_libs/v0/database_requires.py +tests/v0/integration/database-charm/lib/charms/data_platform_libs/v0/database_provides.py +tests/v0/integration/application-charm/lib/charms/data_platform_libs/v0/data_interfaces.py +tests/v0/integration/application-s3-charm/lib/charms/data_platform_libs/v0/s3.py +tests/v0/integration/database-charm/lib/charms/data_platform_libs/v0/data_interfaces.py 
+tests/v0/integration/kafka-charm/lib/charms/data_platform_libs/v0/data_interfaces.py +tests/v0/integration/s3-charm/lib/charms/data_platform_libs/v0/s3.py +tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/database_requires.py +tests/v1/integration/database-charm/lib/charms/data_platform_libs/v1/database_provides.py +tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +tests/v1/integration/application-s3-charm/lib/charms/data_platform_libs/v1/s3.py +tests/v1/integration/database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +tests/v1/integration/s3-charm/lib/charms/data_platform_libs/v1/s3.py' +tests/v1/integration/backward-compatibility-charm/lib/charms/data_platform_libs/v0/data_interfaces.py +tests/v1/integration/backward-compatibility-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py .vscode/ .idea/ diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index b49947f7..e22388d1 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -287,9 +287,9 @@ def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: from typing_extensions import TypeAliasType, override try: - import psycopg + import psycopg2 except ImportError: - psycopg = None + psycopg2 = None # The unique Charmhub library identifier, never change it LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" @@ -654,6 +654,7 @@ class PeerModel(BaseModel): populate_by_name=True, serialize_by_alias=True, alias_generator=lambda x: 
x.replace("_", "-"), + extra="allow", ) @model_validator(mode="after") @@ -680,6 +681,8 @@ def extract_secrets(self, info: ValidationInfo): if value and field_info.annotation == OptionalSecretBool: value = SecretBool(json.loads(value)) + elif value: + value = SecretStr(value) setattr(self, field, value) return self @@ -706,9 +709,11 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ actual_value = ( value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) if secret is None: - if actual_value: + if value: secret = repository.add_secret( aliased_field, actual_value, @@ -721,11 +726,9 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ content = secret.get_content() full_content = copy.deepcopy(content) - if actual_value is None: - full_content.pop(field, None) + if value is None: + full_content.pop(aliased_field, None) else: - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) full_content.update({aliased_field: actual_value}) secret.set_content(full_content) return handler(self) @@ -744,6 +747,7 @@ class CommonModel(BaseModel): populate_by_name=True, serialize_by_alias=True, alias_generator=lambda x: x.replace("_", "-"), + extra="allow", ) resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") @@ -787,6 +791,9 @@ def extract_secrets(self, info: ValidationInfo): value = secret.get_content().get(aliased_field) if value and field_info.annotation == OptionalSecretBool: value = SecretBool(json.loads(value)) + elif value: + value = SecretStr(value) + setattr(self, field, value) return self @@ -822,9 +829,11 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ actual_value = ( value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value ) + if not isinstance(actual_value, str): + actual_value = 
json.dumps(actual_value) if secret is None: - if actual_value: + if value: secret = repository.add_secret( aliased_field, actual_value, secret_group, short_uuid ) @@ -836,12 +845,10 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ content = secret.get_content() full_content = copy.deepcopy(content) - if actual_value is None: - full_content.pop(field, None) + if value is None: + full_content.pop(aliased_field, None) _encountered_secrets.add((secret, secret_field)) else: - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) full_content.update({aliased_field: actual_value}) secret.set_content(full_content) @@ -1003,10 +1010,11 @@ class DataContractV1(BaseModel, Generic[TResourceProviderModel]): TCommonModel = TypeVar("TCommonModel", bound=CommonModel) -def is_topic_value_acceptable(value: str | None): +def is_topic_value_acceptable(value: str | None) -> str | None: """Check whether the given Kafka topic value is acceptable.""" if value and "*" in value[:3]: raise ValueError(f"Error on topic '{value}',, unacceptable value.") + return value class KafkaRequestModel(RequirerCommonModel): @@ -1709,9 +1717,12 @@ def write_model( """Writes the data stored in the model using the repository object.""" context = context or {} dumped = model.model_dump( - mode="json", context={"repository": repository} | context, exclude_none=True + mode="json", context={"repository": repository} | context, exclude_none=False ) for field, value in dumped.items(): + if value is None: + repository.delete_field(field) + continue dumped_value = value if isinstance(value, str) else json.dumps(value) repository.write_field(field, dumped_value) @@ -1951,13 +1962,19 @@ class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read/Write enpoinds are changed.""" + """Read/Write enpoints are changed.""" pass class 
ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read-only enpoinds are changed.""" + """Read-only enpoints are changed.""" + + pass + + +class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Authentication was updated for a user.""" pass @@ -1972,6 +1989,7 @@ class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): resource_entity_created = EventSource(ResourceEntityCreatedEvent) endpoints_changed = EventSource(ResourceEndpointsChangedEvent) read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) ############################################################################## @@ -2072,6 +2090,34 @@ def compute_diff( return _diff + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): """Event Handler for resource provider.""" @@ -2204,34 +2250,6 @@ def _handle_bulk_event( ) store_new_data(event.relation, self.component, new_data, request.request_id) - def _relation_from_secret_label(self, secret_label: str) -> Relation | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 3): - return 
- - try: - relation_id = int(contents[1]) - except ValueError: - return - - relation_name = contents[0] - - try: - return self.model.get_relation(relation_name, relation_id) - except ModelError: - return - - def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 5): - return - - return contents[2] - @override def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: if not self.mtls_enabled: @@ -2251,6 +2269,11 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: if relation.app == self.charm.app: logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return remote_unit = None for unit in relation.units: @@ -2470,39 +2493,39 @@ def are_all_resources_created(self, rel_id: int) -> bool: def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: # Actual checking method. # No need to check for psycopg here, it's been checked before. - if not psycopg: + if not psycopg2: return False try: - with psycopg.connect(connection_string) as connection: + with psycopg2.connect(connection_string) as connection: with connection.cursor() as cursor: cursor.execute( "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) ) return cursor.fetchone() is not None - except psycopg.Error as e: + except psycopg2.Error as e: logger.exception( f"failed to check whether {plugin} plugin is enabled in the database: %s", str(e), ) return False - def is_postgresql_plugin_enabled(self, plugin: str, relation_id: int = 0) -> bool: + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: """Returns whether a plugin is enabled in the database. Args: plugin: name of the plugin to check. 
- relation_id: Optional index to check the database (default: 0 - first relation). + relation_index: Optional index to check the database (default: 0 - first relation). """ - if not psycopg: + if not psycopg2: return False # Can't check a non existing relation. - if len(self.relations) <= relation_id: + if len(self.relations) <= relation_index: return False - relation_id = self.relations[relation_id].id - model = self.interface.build_model(relation_id=relation_id) + relation = self.relations[relation_index] + model = self.interface.build_model(relation_id=relation.id, component=relation.app) for request in model.requests: if request.endpoints and request.username and request.password: host = request.endpoints.split(":")[0] @@ -2577,7 +2600,48 @@ def _get_relation_alias(self, relation_id: int) -> str | None: def _on_secret_changed_event(self, event: SecretChangedEvent): """Event notifying about a new value of a secret.""" - pass + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + response_model = self.interface.build_model(relation.id) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "authentication_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) def 
_on_relation_created_event(self, event: RelationCreatedEvent) -> None: """Event emitted when the database relation is created.""" diff --git a/tests/v0/integration/application-s3-charm/lib/charms/data_platform_libs/v0/s3.py b/tests/v0/integration/application-s3-charm/lib/charms/data_platform_libs/v0/s3.py new file mode 100644 index 00000000..dbf4d5bb --- /dev/null +++ b/tests/v0/integration/application-s3-charm/lib/charms/data_platform_libs/v0/s3.py @@ -0,0 +1,792 @@ +# Copyright 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r"""A library for communicating with the S3 credentials providers and consumers. + +This library provides the relevant interface code implementing the communication +specification for fetching, retrieving, triggering, and responding to events related to +the S3 provider charm and its consumers. + +### Provider charm + +The provider is implemented in the `s3-provider` charm which is meant to be deployed +alongside one or more consumer charms. The provider charm is serving the s3 credentials and +metadata needed to communicate and work with an S3 compatible backend. 
+ +Example: +```python + +from charms.data_platform_libs.v0.s3 import CredentialRequestedEvent, S3Provider + + +class ExampleProviderCharm(CharmBase): + def __init__(self, *args) -> None: + super().__init__(*args) + self.s3_provider = S3Provider(self, "s3-credentials") + + self.framework.observe(self.s3_provider.on.credentials_requested, + self._on_credential_requested) + + def _on_credential_requested(self, event: CredentialRequestedEvent): + if not self.unit.is_leader(): + return + + # get relation id + relation_id = event.relation.id + + # get bucket name + bucket = event.bucket + + # S3 configuration parameters + desired_configuration = {"access-key": "your-access-key", "secret-key": + "your-secret-key", "bucket": "your-bucket"} + + # update the configuration + self.s3_provider.update_connection_info(relation_id, desired_configuration) + + # or it is possible to set each field independently + + self.s3_provider.set_secret_key(relation_id, "your-secret-key") + + +if __name__ == "__main__": + main(ExampleProviderCharm) + + +### Requirer charm + +The requirer charm is the charm requiring the S3 credentials. 
+An example of requirer charm is the following: + +Example: +```python + +from charms.data_platform_libs.v0.s3 import ( + CredentialsChangedEvent, + CredentialsGoneEvent, + S3Requirer +) + +class ExampleRequirerCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + + bucket_name = "test-bucket" + # if bucket name is not provided the bucket name will be generated + # e.g., ('relation-{relation.id}') + + self.s3_client = S3Requirer(self, "s3-credentials", bucket_name) + + self.framework.observe(self.s3_client.on.credentials_changed, self._on_credential_changed) + self.framework.observe(self.s3_client.on.credentials_gone, self._on_credential_gone) + + def _on_credential_changed(self, event: CredentialsChangedEvent): + + # access single parameter credential + secret_key = event.secret_key + access_key = event.access_key + + # or as alternative all credentials can be collected as a dictionary + credentials = self.s3_client.get_s3_credentials() + + def _on_credential_gone(self, event: CredentialsGoneEvent): + # credentials are removed + pass + + if __name__ == "__main__": + main(ExampleRequirerCharm) +``` + +""" + +import json +import logging +from collections import namedtuple +from typing import Dict, List, Optional, Union + +import ops.charm +import ops.framework +import ops.model +from ops.charm import ( + CharmBase, + CharmEvents, + RelationBrokenEvent, + RelationChangedEvent, + RelationEvent, + RelationJoinedEvent, +) +from ops.framework import EventSource, Object, ObjectEvents +from ops.model import Application, Relation, RelationDataContent, Unit + +# The unique Charmhub library identifier, never change it +LIBID = "fca396f6254246c9bfa565b1f85ab528" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 6 + +logger = logging.getLogger(__name__) + +Diff = namedtuple("Diff", 
"added changed deleted") +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. + +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + + +def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. + bucket: bucket of the databag (app or unit) + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + # Retrieve the old data from the data key in the application relation databag. + old_data = json.loads(event.relation.data[bucket].get("data", "{}")) + # Retrieve the new data from the event relation databag. + new_data = ( + {key: value for key, value in event.relation.data[event.app].items() if key != "data"} + if event.app + else {} + ) + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + + # TODO: evaluate the possibility of losing the diff if some error + # happens in the charm before the diff is completely checked (DPE-412). + # Convert the new_data to a serializable format and save it for a next diff check. + event.relation.data[bucket].update({"data": json.dumps(new_data)}) + + # Return the diff with all possible changes. 
+ return Diff(added, changed, deleted) + + +class BucketEvent(RelationEvent): + """Base class for bucket events.""" + + @property + def bucket(self) -> Optional[str]: + """Returns the bucket was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("bucket", "") + + +class CredentialRequestedEvent(BucketEvent): + """Event emitted when a set of credential is requested for use on this relation.""" + + +class S3CredentialEvents(CharmEvents): + """Event descriptor for events raised by S3Provider.""" + + credentials_requested = EventSource(CredentialRequestedEvent) + + +class S3Provider(Object): + """A provider handler for communicating S3 credentials to consumers.""" + + on = S3CredentialEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + ): + super().__init__(charm, relation_name) + self.charm = charm + self.local_app = self.charm.model.app + self.local_unit = self.charm.unit + self.relation_name = relation_name + + # monitor relation changed event for changes in the credentials + self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed) + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """React to the relation changed event by consuming data.""" + if not self.charm.unit.is_leader(): + return + diff = self._diff(event) + # emit on credential requested if bucket is provided by the requirer application + if "bucket" in diff.added: + getattr(self.on, "credentials_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + def _load_relation_data(self, raw_relation_data: dict) -> dict: + """Loads relation data from the relation data bag. + + Args: + raw_relation_data: Relation data from the databag + Returns: + dict: Relation data in dict format. 
+ """ + connection_data = {} + for key in raw_relation_data: + try: + connection_data[key] = json.loads(raw_relation_data[key]) + except (json.decoder.JSONDecodeError, TypeError): + connection_data[key] = raw_relation_data[key] + return connection_data + + # def _diff(self, event: RelationChangedEvent) -> Diff: + # """Retrieves the diff of the data in the relation changed databag. + + # Args: + # event: relation changed event. + + # Returns: + # a Diff instance containing the added, deleted and changed + # keys from the event relation databag. + # """ + # # Retrieve the old data from the data key in the application relation databag. + # old_data = json.loads(event.relation.data[self.local_app].get("data", "{}")) + # # Retrieve the new data from the event relation databag. + # new_data = { + # key: value for key, value in event.relation.data[event.app].items() if key != "data" + # } + + # # These are the keys that were added to the databag and triggered this event. + # added = new_data.keys() - old_data.keys() + # # These are the keys that were removed from the databag and triggered this event. + # deleted = old_data.keys() - new_data.keys() + # # These are the keys that already existed in the databag, + # # but had their values changed. + # changed = { + # key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key] + # } + + # # TODO: evaluate the possibility of losing the diff if some error + # # happens in the charm before the diff is completely checked (DPE-412). + # # Convert the new_data to a serializable format and save it for a next diff check. + # event.relation.data[self.local_app].update({"data": json.dumps(new_data)}) + + # # Return the diff with all possible changes. + # return Diff(added, changed, deleted) + + def _diff(self, event: RelationChangedEvent) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. 
+ + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + return diff(event, self.local_app) + + def fetch_relation_data(self) -> dict: + """Retrieves data from relation. + + This function can be used to retrieve data from a relation + in the charm code when outside an event callback. + + Returns: + a dict of the values stored in the relation data bag + for all relation instances (indexed by the relation id). + """ + data = {} + for relation in self.relations: + data[relation.id] = ( + {key: value for key, value in relation.data[relation.app].items() if key != "data"} + if relation.app + else {} + ) + return data + + def update_connection_info(self, relation_id: int, connection_data: dict) -> None: + """Updates the credential data as set of key-value pairs in the relation. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + connection_data: dict containing the key-value pairs + that should be updated. 
+ """ + # check and write changes only if you are the leader + if not self.local_unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + return + + # configuration options that are list + s3_list_options = ["attributes", "tls-ca-chain"] + + # update the databag, if connection data did not change with respect to before + # the relation changed event is not triggered + updated_connection_data = {} + for configuration_option, configuration_value in connection_data.items(): + if configuration_option in s3_list_options: + updated_connection_data[configuration_option] = json.dumps(configuration_value) + else: + updated_connection_data[configuration_option] = configuration_value + + relation.data[self.local_app].update(updated_connection_data) + logger.debug("Updated S3 connection info.") + + @property + def relations(self) -> List[Relation]: + """The list of Relation instances associated with this relation_name.""" + return list(self.charm.model.relations[self.relation_name]) + + def set_bucket(self, relation_id: int, bucket: str) -> None: + """Sets bucket name in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + bucket: the bucket name. + """ + self.update_connection_info(relation_id, {"bucket": bucket}) + + def set_access_key(self, relation_id: int, access_key: str) -> None: + """Sets access-key value in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + access_key: the access-key value. + """ + self.update_connection_info(relation_id, {"access-key": access_key}) + + def set_secret_key(self, relation_id: int, secret_key: str) -> None: + """Sets the secret key value in application databag. 
+ + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + secret_key: the value of the secret key. + """ + self.update_connection_info(relation_id, {"secret-key": secret_key}) + + def set_path(self, relation_id: int, path: str) -> None: + """Sets the path value in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + path: the path value. + """ + self.update_connection_info(relation_id, {"path": path}) + + def set_endpoint(self, relation_id: int, endpoint: str) -> None: + """Sets the endpoint address in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + endpoint: the endpoint address. + """ + self.update_connection_info(relation_id, {"endpoint": endpoint}) + + def set_region(self, relation_id: int, region: str) -> None: + """Sets the region location in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + region: the region address. + """ + self.update_connection_info(relation_id, {"region": region}) + + def set_s3_uri_style(self, relation_id: int, s3_uri_style: str) -> None: + """Sets the S3 URI style in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + s3_uri_style: the s3 URI style. + """ + self.update_connection_info(relation_id, {"s3-uri-style": s3_uri_style}) + + def set_storage_class(self, relation_id: int, storage_class: str) -> None: + """Sets the storage class in application databag. 
+ + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + storage_class: the storage class. + """ + self.update_connection_info(relation_id, {"storage-class": storage_class}) + + def set_tls_ca_chain(self, relation_id: int, tls_ca_chain: List[str]) -> None: + """Sets the tls_ca_chain value in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + tls_ca_chain: the TLS Chain value. + """ + self.update_connection_info(relation_id, {"tls-ca-chain": tls_ca_chain}) + + def set_s3_api_version(self, relation_id: int, s3_api_version: str) -> None: + """Sets the S3 API version in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + s3_api_version: the S3 version value. + """ + self.update_connection_info(relation_id, {"s3-api-version": s3_api_version}) + + def set_delete_older_than_days(self, relation_id: int, days: int) -> None: + """Sets the retention days for full backups in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + days: the value. + """ + self.update_connection_info(relation_id, {"delete-older-than-days": str(days)}) + + def set_attributes(self, relation_id: int, attributes: List[str]) -> None: + """Sets the connection attributes in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + attributes: the attributes value. 
+ """ + self.update_connection_info(relation_id, {"attributes": attributes}) + + +class S3Event(RelationEvent): + """Base class for S3 storage events.""" + + @property + def bucket(self) -> Optional[str]: + """Returns the bucket name.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("bucket") + + @property + def access_key(self) -> Optional[str]: + """Returns the access key.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("access-key") + + @property + def secret_key(self) -> Optional[str]: + """Returns the secret key.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("secret-key") + + @property + def path(self) -> Optional[str]: + """Returns the path where data can be stored.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("path") + + @property + def endpoint(self) -> Optional[str]: + """Returns the endpoint address.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoint") + + @property + def region(self) -> Optional[str]: + """Returns the region.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("region") + + @property + def s3_uri_style(self) -> Optional[str]: + """Returns the s3 uri style.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("s3-uri-style") + + @property + def storage_class(self) -> Optional[str]: + """Returns the storage class name.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("storage-class") + + @property + def tls_ca_chain(self) -> Optional[List[str]]: + """Returns the TLS CA chain.""" + if not self.relation.app: + return None + + tls_ca_chain = self.relation.data[self.relation.app].get("tls-ca-chain") + if tls_ca_chain is not None: + return 
json.loads(tls_ca_chain) + return None + + @property + def s3_api_version(self) -> Optional[str]: + """Returns the S3 API version.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("s3-api-version") + + @property + def delete_older_than_days(self) -> Optional[int]: + """Returns the retention days for full backups.""" + if not self.relation.app: + return None + + days = self.relation.data[self.relation.app].get("delete-older-than-days") + if days is None: + return None + return int(days) + + @property + def attributes(self) -> Optional[List[str]]: + """Returns the attributes.""" + if not self.relation.app: + return None + + attributes = self.relation.data[self.relation.app].get("attributes") + if attributes is not None: + return json.loads(attributes) + return None + + +class CredentialsChangedEvent(S3Event): + """Event emitted when S3 credential are changed on this relation.""" + + +class CredentialsGoneEvent(RelationEvent): + """Event emitted when S3 credential are removed from this relation.""" + + +class S3CredentialRequiresEvents(ObjectEvents): + """Event descriptor for events raised by the S3Provider.""" + + credentials_changed = EventSource(CredentialsChangedEvent) + credentials_gone = EventSource(CredentialsGoneEvent) + + +S3_REQUIRED_OPTIONS = ["access-key", "secret-key"] + + +class S3Requirer(Object): + """Requires-side of the s3 relation.""" + + on = S3CredentialRequiresEvents() # pyright: ignore[reportAssignmentType] + + def __init__( + self, charm: ops.charm.CharmBase, relation_name: str, bucket_name: Optional[str] = None + ): + """Manager of the s3 client relations.""" + super().__init__(charm, relation_name) + + self.relation_name = relation_name + self.charm = charm + self.local_app = self.charm.model.app + self.local_unit = self.charm.unit + self.bucket = bucket_name + + self.framework.observe( + self.charm.on[self.relation_name].relation_changed, self._on_relation_changed + ) + + 
self.framework.observe( + self.charm.on[self.relation_name].relation_joined, self._on_relation_joined + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_broken, + self._on_relation_broken, + ) + + def _generate_bucket_name(self, event: RelationJoinedEvent): + """Returns the bucket name generated from relation id.""" + return f"relation-{event.relation.id}" + + def _on_relation_joined(self, event: RelationJoinedEvent) -> None: + """Event emitted when the application joins the s3 relation.""" + if self.bucket is None: + self.bucket = self._generate_bucket_name(event) + self.update_connection_info(event.relation.id, {"bucket": self.bucket}) + + def fetch_relation_data(self) -> dict: + """Retrieves data from relation. + + This function can be used to retrieve data from a relation + in the charm code when outside an event callback. + + Returns: + a dict of the values stored in the relation data bag + for all relation instances (indexed by the relation id). + """ + data = {} + + for relation in self.relations: + data[relation.id] = self._load_relation_data(relation.data[self.charm.app]) + return data + + def update_connection_info(self, relation_id: int, connection_data: dict) -> None: + """Updates the credential data as set of key-value pairs in the relation. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + connection_data: dict containing the key-value pairs + that should be updated. 
+ """ + # check and write changes only if you are the leader + if not self.local_unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + return + + # update the databag, if connection data did not change with respect to before + # the relation changed event is not triggered + # configuration options that are list + s3_list_options = ["attributes", "tls-ca-chain"] + updated_connection_data = {} + for configuration_option, configuration_value in connection_data.items(): + if configuration_option in s3_list_options: + updated_connection_data[configuration_option] = json.dumps(configuration_value) + else: + updated_connection_data[configuration_option] = configuration_value + + relation.data[self.local_app].update(updated_connection_data) + logger.debug("Updated S3 credentials.") + + def _load_relation_data(self, raw_relation_data: RelationDataContent) -> Dict[str, str]: + """Loads relation data from the relation data bag. + + Args: + raw_relation_data: Relation data from the databag + Returns: + dict: Relation data in dict format. + """ + connection_data = {} + for key in raw_relation_data: + try: + connection_data[key] = json.loads(raw_relation_data[key]) + except (json.decoder.JSONDecodeError, TypeError): + connection_data[key] = raw_relation_data[key] + return connection_data + + def _diff(self, event: RelationChangedEvent) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. 
+ """ + return diff(event, self.local_unit) + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Notify the charm about the presence of S3 credentials.""" + # check if the mandatory options are in the relation data + contains_required_options = True + # get current credentials data + credentials = self.get_s3_connection_info() + # records missing options + missing_options = [] + for configuration_option in S3_REQUIRED_OPTIONS: + if configuration_option not in credentials: + contains_required_options = False + missing_options.append(configuration_option) + # emit credential change event only if all mandatory fields are present + if contains_required_options: + getattr(self.on, "credentials_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + else: + logger.warning( + f"Some mandatory fields: {missing_options} are not present, do not emit credential change event!" + ) + + def get_s3_connection_info(self) -> Dict[str, str]: + """Return the s3 credentials as a dictionary.""" + for relation in self.relations: + if relation and relation.app: + return self._load_relation_data(relation.data[relation.app]) + + return {} + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Notify the charm about a broken S3 credential store relation.""" + getattr(self.on, "credentials_gone").emit(event.relation, app=event.app, unit=event.unit) + + @property + def relations(self) -> List[Relation]: + """The list of Relation instances associated with this relation_name.""" + return list(self.charm.model.relations[self.relation_name]) diff --git a/tests/v0/integration/s3-charm/lib/charms/data_platform_libs/v0/s3.py b/tests/v0/integration/s3-charm/lib/charms/data_platform_libs/v0/s3.py new file mode 100644 index 00000000..dbf4d5bb --- /dev/null +++ b/tests/v0/integration/s3-charm/lib/charms/data_platform_libs/v0/s3.py @@ -0,0 +1,792 @@ +# Copyright 2023 Canonical Ltd. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r"""A library for communicating with the S3 credentials providers and consumers. + +This library provides the relevant interface code implementing the communication +specification for fetching, retrieving, triggering, and responding to events related to +the S3 provider charm and its consumers. + +### Provider charm + +The provider is implemented in the `s3-provider` charm which is meant to be deployed +alongside one or more consumer charms. The provider charm is serving the s3 credentials and +metadata needed to communicate and work with an S3 compatible backend. 
+ +Example: +```python + +from charms.data_platform_libs.v0.s3 import CredentialRequestedEvent, S3Provider + + +class ExampleProviderCharm(CharmBase): + def __init__(self, *args) -> None: + super().__init__(*args) + self.s3_provider = S3Provider(self, "s3-credentials") + + self.framework.observe(self.s3_provider.on.credentials_requested, + self._on_credential_requested) + + def _on_credential_requested(self, event: CredentialRequestedEvent): + if not self.unit.is_leader(): + return + + # get relation id + relation_id = event.relation.id + + # get bucket name + bucket = event.bucket + + # S3 configuration parameters + desired_configuration = {"access-key": "your-access-key", "secret-key": + "your-secret-key", "bucket": "your-bucket"} + + # update the configuration + self.s3_provider.update_connection_info(relation_id, desired_configuration) + + # or it is possible to set each field independently + + self.s3_provider.set_secret_key(relation_id, "your-secret-key") + + +if __name__ == "__main__": + main(ExampleProviderCharm) + + +### Requirer charm + +The requirer charm is the charm requiring the S3 credentials. 
+An example of requirer charm is the following: + +Example: +```python + +from charms.data_platform_libs.v0.s3 import ( + CredentialsChangedEvent, + CredentialsGoneEvent, + S3Requirer +) + +class ExampleRequirerCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + + bucket_name = "test-bucket" + # if bucket name is not provided the bucket name will be generated + # e.g., ('relation-{relation.id}') + + self.s3_client = S3Requirer(self, "s3-credentials", bucket_name) + + self.framework.observe(self.s3_client.on.credentials_changed, self._on_credential_changed) + self.framework.observe(self.s3_client.on.credentials_gone, self._on_credential_gone) + + def _on_credential_changed(self, event: CredentialsChangedEvent): + + # access single parameter credential + secret_key = event.secret_key + access_key = event.access_key + + # or as alternative all credentials can be collected as a dictionary + credentials = self.s3_client.get_s3_credentials() + + def _on_credential_gone(self, event: CredentialsGoneEvent): + # credentials are removed + pass + + if __name__ == "__main__": + main(ExampleRequirerCharm) +``` + +""" + +import json +import logging +from collections import namedtuple +from typing import Dict, List, Optional, Union + +import ops.charm +import ops.framework +import ops.model +from ops.charm import ( + CharmBase, + CharmEvents, + RelationBrokenEvent, + RelationChangedEvent, + RelationEvent, + RelationJoinedEvent, +) +from ops.framework import EventSource, Object, ObjectEvents +from ops.model import Application, Relation, RelationDataContent, Unit + +# The unique Charmhub library identifier, never change it +LIBID = "fca396f6254246c9bfa565b1f85ab528" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 6 + +logger = logging.getLogger(__name__) + +Diff = namedtuple("Diff", 
"added changed deleted") +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. + +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + + +def diff(event: RelationChangedEvent, bucket: Union[Unit, Application]) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. + bucket: bucket of the databag (app or unit) + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + # Retrieve the old data from the data key in the application relation databag. + old_data = json.loads(event.relation.data[bucket].get("data", "{}")) + # Retrieve the new data from the event relation databag. + new_data = ( + {key: value for key, value in event.relation.data[event.app].items() if key != "data"} + if event.app + else {} + ) + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + + # TODO: evaluate the possibility of losing the diff if some error + # happens in the charm before the diff is completely checked (DPE-412). + # Convert the new_data to a serializable format and save it for a next diff check. + event.relation.data[bucket].update({"data": json.dumps(new_data)}) + + # Return the diff with all possible changes. 
+ return Diff(added, changed, deleted) + + +class BucketEvent(RelationEvent): + """Base class for bucket events.""" + + @property + def bucket(self) -> Optional[str]: + """Returns the bucket was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("bucket", "") + + +class CredentialRequestedEvent(BucketEvent): + """Event emitted when a set of credential is requested for use on this relation.""" + + +class S3CredentialEvents(CharmEvents): + """Event descriptor for events raised by S3Provider.""" + + credentials_requested = EventSource(CredentialRequestedEvent) + + +class S3Provider(Object): + """A provider handler for communicating S3 credentials to consumers.""" + + on = S3CredentialEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + ): + super().__init__(charm, relation_name) + self.charm = charm + self.local_app = self.charm.model.app + self.local_unit = self.charm.unit + self.relation_name = relation_name + + # monitor relation changed event for changes in the credentials + self.framework.observe(charm.on[relation_name].relation_changed, self._on_relation_changed) + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """React to the relation changed event by consuming data.""" + if not self.charm.unit.is_leader(): + return + diff = self._diff(event) + # emit on credential requested if bucket is provided by the requirer application + if "bucket" in diff.added: + getattr(self.on, "credentials_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + def _load_relation_data(self, raw_relation_data: dict) -> dict: + """Loads relation data from the relation data bag. + + Args: + raw_relation_data: Relation data from the databag + Returns: + dict: Relation data in dict format. 
+ """ + connection_data = {} + for key in raw_relation_data: + try: + connection_data[key] = json.loads(raw_relation_data[key]) + except (json.decoder.JSONDecodeError, TypeError): + connection_data[key] = raw_relation_data[key] + return connection_data + + # def _diff(self, event: RelationChangedEvent) -> Diff: + # """Retrieves the diff of the data in the relation changed databag. + + # Args: + # event: relation changed event. + + # Returns: + # a Diff instance containing the added, deleted and changed + # keys from the event relation databag. + # """ + # # Retrieve the old data from the data key in the application relation databag. + # old_data = json.loads(event.relation.data[self.local_app].get("data", "{}")) + # # Retrieve the new data from the event relation databag. + # new_data = { + # key: value for key, value in event.relation.data[event.app].items() if key != "data" + # } + + # # These are the keys that were added to the databag and triggered this event. + # added = new_data.keys() - old_data.keys() + # # These are the keys that were removed from the databag and triggered this event. + # deleted = old_data.keys() - new_data.keys() + # # These are the keys that already existed in the databag, + # # but had their values changed. + # changed = { + # key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key] + # } + + # # TODO: evaluate the possibility of losing the diff if some error + # # happens in the charm before the diff is completely checked (DPE-412). + # # Convert the new_data to a serializable format and save it for a next diff check. + # event.relation.data[self.local_app].update({"data": json.dumps(new_data)}) + + # # Return the diff with all possible changes. + # return Diff(added, changed, deleted) + + def _diff(self, event: RelationChangedEvent) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. 
+ + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + return diff(event, self.local_app) + + def fetch_relation_data(self) -> dict: + """Retrieves data from relation. + + This function can be used to retrieve data from a relation + in the charm code when outside an event callback. + + Returns: + a dict of the values stored in the relation data bag + for all relation instances (indexed by the relation id). + """ + data = {} + for relation in self.relations: + data[relation.id] = ( + {key: value for key, value in relation.data[relation.app].items() if key != "data"} + if relation.app + else {} + ) + return data + + def update_connection_info(self, relation_id: int, connection_data: dict) -> None: + """Updates the credential data as set of key-value pairs in the relation. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + connection_data: dict containing the key-value pairs + that should be updated. 
+ """ + # check and write changes only if you are the leader + if not self.local_unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + return + + # configuration options that are list + s3_list_options = ["attributes", "tls-ca-chain"] + + # update the databag, if connection data did not change with respect to before + # the relation changed event is not triggered + updated_connection_data = {} + for configuration_option, configuration_value in connection_data.items(): + if configuration_option in s3_list_options: + updated_connection_data[configuration_option] = json.dumps(configuration_value) + else: + updated_connection_data[configuration_option] = configuration_value + + relation.data[self.local_app].update(updated_connection_data) + logger.debug("Updated S3 connection info.") + + @property + def relations(self) -> List[Relation]: + """The list of Relation instances associated with this relation_name.""" + return list(self.charm.model.relations[self.relation_name]) + + def set_bucket(self, relation_id: int, bucket: str) -> None: + """Sets bucket name in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + bucket: the bucket name. + """ + self.update_connection_info(relation_id, {"bucket": bucket}) + + def set_access_key(self, relation_id: int, access_key: str) -> None: + """Sets access-key value in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + access_key: the access-key value. + """ + self.update_connection_info(relation_id, {"access-key": access_key}) + + def set_secret_key(self, relation_id: int, secret_key: str) -> None: + """Sets the secret key value in application databag. 
+ + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + secret_key: the value of the secret key. + """ + self.update_connection_info(relation_id, {"secret-key": secret_key}) + + def set_path(self, relation_id: int, path: str) -> None: + """Sets the path value in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + path: the path value. + """ + self.update_connection_info(relation_id, {"path": path}) + + def set_endpoint(self, relation_id: int, endpoint: str) -> None: + """Sets the endpoint address in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + endpoint: the endpoint address. + """ + self.update_connection_info(relation_id, {"endpoint": endpoint}) + + def set_region(self, relation_id: int, region: str) -> None: + """Sets the region location in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + region: the region address. + """ + self.update_connection_info(relation_id, {"region": region}) + + def set_s3_uri_style(self, relation_id: int, s3_uri_style: str) -> None: + """Sets the S3 URI style in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + s3_uri_style: the s3 URI style. + """ + self.update_connection_info(relation_id, {"s3-uri-style": s3_uri_style}) + + def set_storage_class(self, relation_id: int, storage_class: str) -> None: + """Sets the storage class in application databag. 
+ + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + storage_class: the storage class. + """ + self.update_connection_info(relation_id, {"storage-class": storage_class}) + + def set_tls_ca_chain(self, relation_id: int, tls_ca_chain: List[str]) -> None: + """Sets the tls_ca_chain value in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + tls_ca_chain: the TLS Chain value. + """ + self.update_connection_info(relation_id, {"tls-ca-chain": tls_ca_chain}) + + def set_s3_api_version(self, relation_id: int, s3_api_version: str) -> None: + """Sets the S3 API version in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + s3_api_version: the S3 version value. + """ + self.update_connection_info(relation_id, {"s3-api-version": s3_api_version}) + + def set_delete_older_than_days(self, relation_id: int, days: int) -> None: + """Sets the retention days for full backups in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + days: the value. + """ + self.update_connection_info(relation_id, {"delete-older-than-days": str(days)}) + + def set_attributes(self, relation_id: int, attributes: List[str]) -> None: + """Sets the connection attributes in application databag. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + attributes: the attributes value. 
+ """ + self.update_connection_info(relation_id, {"attributes": attributes}) + + +class S3Event(RelationEvent): + """Base class for S3 storage events.""" + + @property + def bucket(self) -> Optional[str]: + """Returns the bucket name.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("bucket") + + @property + def access_key(self) -> Optional[str]: + """Returns the access key.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("access-key") + + @property + def secret_key(self) -> Optional[str]: + """Returns the secret key.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("secret-key") + + @property + def path(self) -> Optional[str]: + """Returns the path where data can be stored.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("path") + + @property + def endpoint(self) -> Optional[str]: + """Returns the endpoint address.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoint") + + @property + def region(self) -> Optional[str]: + """Returns the region.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("region") + + @property + def s3_uri_style(self) -> Optional[str]: + """Returns the s3 uri style.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("s3-uri-style") + + @property + def storage_class(self) -> Optional[str]: + """Returns the storage class name.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("storage-class") + + @property + def tls_ca_chain(self) -> Optional[List[str]]: + """Returns the TLS CA chain.""" + if not self.relation.app: + return None + + tls_ca_chain = self.relation.data[self.relation.app].get("tls-ca-chain") + if tls_ca_chain is not None: + return 
json.loads(tls_ca_chain) + return None + + @property + def s3_api_version(self) -> Optional[str]: + """Returns the S3 API version.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("s3-api-version") + + @property + def delete_older_than_days(self) -> Optional[int]: + """Returns the retention days for full backups.""" + if not self.relation.app: + return None + + days = self.relation.data[self.relation.app].get("delete-older-than-days") + if days is None: + return None + return int(days) + + @property + def attributes(self) -> Optional[List[str]]: + """Returns the attributes.""" + if not self.relation.app: + return None + + attributes = self.relation.data[self.relation.app].get("attributes") + if attributes is not None: + return json.loads(attributes) + return None + + +class CredentialsChangedEvent(S3Event): + """Event emitted when S3 credential are changed on this relation.""" + + +class CredentialsGoneEvent(RelationEvent): + """Event emitted when S3 credential are removed from this relation.""" + + +class S3CredentialRequiresEvents(ObjectEvents): + """Event descriptor for events raised by the S3Provider.""" + + credentials_changed = EventSource(CredentialsChangedEvent) + credentials_gone = EventSource(CredentialsGoneEvent) + + +S3_REQUIRED_OPTIONS = ["access-key", "secret-key"] + + +class S3Requirer(Object): + """Requires-side of the s3 relation.""" + + on = S3CredentialRequiresEvents() # pyright: ignore[reportAssignmentType] + + def __init__( + self, charm: ops.charm.CharmBase, relation_name: str, bucket_name: Optional[str] = None + ): + """Manager of the s3 client relations.""" + super().__init__(charm, relation_name) + + self.relation_name = relation_name + self.charm = charm + self.local_app = self.charm.model.app + self.local_unit = self.charm.unit + self.bucket = bucket_name + + self.framework.observe( + self.charm.on[self.relation_name].relation_changed, self._on_relation_changed + ) + + 
self.framework.observe( + self.charm.on[self.relation_name].relation_joined, self._on_relation_joined + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_broken, + self._on_relation_broken, + ) + + def _generate_bucket_name(self, event: RelationJoinedEvent): + """Returns the bucket name generated from relation id.""" + return f"relation-{event.relation.id}" + + def _on_relation_joined(self, event: RelationJoinedEvent) -> None: + """Event emitted when the application joins the s3 relation.""" + if self.bucket is None: + self.bucket = self._generate_bucket_name(event) + self.update_connection_info(event.relation.id, {"bucket": self.bucket}) + + def fetch_relation_data(self) -> dict: + """Retrieves data from relation. + + This function can be used to retrieve data from a relation + in the charm code when outside an event callback. + + Returns: + a dict of the values stored in the relation data bag + for all relation instances (indexed by the relation id). + """ + data = {} + + for relation in self.relations: + data[relation.id] = self._load_relation_data(relation.data[self.charm.app]) + return data + + def update_connection_info(self, relation_id: int, connection_data: dict) -> None: + """Updates the credential data as set of key-value pairs in the relation. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + connection_data: dict containing the key-value pairs + that should be updated. 
+ """ + # check and write changes only if you are the leader + if not self.local_unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + return + + # update the databag, if connection data did not change with respect to before + # the relation changed event is not triggered + # configuration options that are list + s3_list_options = ["attributes", "tls-ca-chain"] + updated_connection_data = {} + for configuration_option, configuration_value in connection_data.items(): + if configuration_option in s3_list_options: + updated_connection_data[configuration_option] = json.dumps(configuration_value) + else: + updated_connection_data[configuration_option] = configuration_value + + relation.data[self.local_app].update(updated_connection_data) + logger.debug("Updated S3 credentials.") + + def _load_relation_data(self, raw_relation_data: RelationDataContent) -> Dict[str, str]: + """Loads relation data from the relation data bag. + + Args: + raw_relation_data: Relation data from the databag + Returns: + dict: Relation data in dict format. + """ + connection_data = {} + for key in raw_relation_data: + try: + connection_data[key] = json.loads(raw_relation_data[key]) + except (json.decoder.JSONDecodeError, TypeError): + connection_data[key] = raw_relation_data[key] + return connection_data + + def _diff(self, event: RelationChangedEvent) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. 
+ """ + return diff(event, self.local_unit) + + def _on_relation_changed(self, event: RelationChangedEvent) -> None: + """Notify the charm about the presence of S3 credentials.""" + # check if the mandatory options are in the relation data + contains_required_options = True + # get current credentials data + credentials = self.get_s3_connection_info() + # records missing options + missing_options = [] + for configuration_option in S3_REQUIRED_OPTIONS: + if configuration_option not in credentials: + contains_required_options = False + missing_options.append(configuration_option) + # emit credential change event only if all mandatory fields are present + if contains_required_options: + getattr(self.on, "credentials_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + else: + logger.warning( + f"Some mandatory fields: {missing_options} are not present, do not emit credential change event!" + ) + + def get_s3_connection_info(self) -> Dict[str, str]: + """Return the s3 credentials as a dictionary.""" + for relation in self.relations: + if relation and relation.app: + return self._load_relation_data(relation.data[relation.app]) + + return {} + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Notify the charm about a broken S3 credential store relation.""" + getattr(self.on, "credentials_gone").emit(event.relation, app=event.app, unit=event.unit) + + @property + def relations(self) -> List[Relation]: + """The list of Relation instances associated with this relation_name.""" + return list(self.charm.model.relations[self.relation_name]) diff --git a/tests/v1/conftest.py b/tests/v1/conftest.py index c7caaf70..ebcc0709 100644 --- a/tests/v1/conftest.py +++ b/tests/v1/conftest.py @@ -66,13 +66,3 @@ def only_with_juju_secrets(juju_has_secrets): """Pretty way to skip Juju 3 tests.""" if not juju_has_secrets: pytest.skip("Secrets test only applies on Juju 3.x") - - -@pytest.fixture -def only_without_juju_secrets(juju_has_secrets): - 
"""Pretty way to skip Juju 2-specific tests. - - Typically: to save CI time, when the same check were executed in a Juju 3-specific way already - """ - if juju_has_secrets: - pytest.skip("Skipping legacy secrets tests") diff --git a/tests/v1/integration/application-charm/charmcraft.yaml b/tests/v1/integration/application-charm/charmcraft.yaml index 97a4b3c2..3109b74e 100644 --- a/tests/v1/integration/application-charm/charmcraft.yaml +++ b/tests/v1/integration/application-charm/charmcraft.yaml @@ -2,12 +2,11 @@ # See LICENSE file for licensing details. type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -60,14 +59,7 @@ parts: - pkg-config # Needed to build Python dependencies with Rust from source - libpq-dev override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu # archive—which means the rustup version could be updated at any time. 
Print rustup version diff --git a/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py new file mode 100644 index 00000000..e22388d1 --- /dev/null +++ b/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -0,0 +1,2753 @@ +# Copyright 2025 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. + +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. 
The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. + + +#### Requirer Charm + +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +```python +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ClientCharm(CharmBase): + # Database charm that accepts connections from application charms. + def __init__(self, *args) -> None: + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + RequirerCommonModel( + entity_type="USER", + ) + ] + self.database = ResourceRequirerEventHandler( + self,"database", requests, response_model=ResourceProviderModel + ) + self.framework.observe(self.database.on.resource_created, self._on_resource_created) + self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) + + def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new database is created. + relation_id = event.relation.id + response = event.response # This is the response model + + username = event.response.username + password = event.response.password + ... + + def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new entity is created. + ... 
+
+Compared to V0, this library makes heavy use of pydantic models, and allows for
+multiple requests, specified as a list.
+On the Requirer side, each response will trigger one custom event for that response.
+This way, it allows for more strategic events to be emitted according to the request.
+
+As shown above, the library provides some custom events to handle specific situations, which are listed below:
+- resource_created: event emitted when the requested database is created.
+- resource_entity_created: event emitted when the requested entity is created.
+- endpoints_changed: event emitted when the read/write endpoints of the database have changed.
+- read_only_endpoints_changed: event emitted when the read-only endpoints of the database
+  have changed. Event is not triggered if read/write endpoints changed too.
+
+If it is needed to connect multiple database clusters to the same relation endpoint
+the application charm can implement the same code as if it would connect to only
+one database cluster (like the above code example).
+
+To differentiate multiple clusters connected to the same relation endpoint
+the application charm can use the name of the remote application:
+
+```python
+
+def _on_resource_created(self, event: ResourceCreatedEvent) -> None:
+    # Get the remote app name of the cluster that triggered this event
+    cluster = event.relation.app.name
+```
+
+It is also possible to provide an alias for each different database cluster/relation.
+
+So, it is possible to differentiate the clusters in two ways.
+The first is to use the remote application name, i.e., `event.relation.app.name`, as above.
+
+The second way is to use different event handlers to handle each cluster events.
+The implementation would be something like the following code:
+
+```python
+
+from charms.data_platform_libs.v1.data_interfaces import (
+    RequirerCommonModel,
+    RequirerDataContractV1,
+    ResourceCreatedEvent,
+    ResourceEntityCreatedEvent,
+    ResourceProviderModel,
+    ResourceRequirerEventHandler,
+)
+
+class ApplicationCharm(CharmBase):
+    # Application charm that connects to database charms.
+
+    def __init__(self, *args):
+        super().__init__(*args)
+
+        requests = [
+            RequirerCommonModel(
+                resource="clientdb",
+            ),
+            RequirerCommonModel(
+                resource="clientbis",
+            ),
+        ]
+        # Define the cluster aliases and one handler for each cluster database created event.
+        self.database = ResourceRequirerEventHandler(
+            self,
+            relation_name="database",
+            relations_aliases=["cluster1", "cluster2"],
+            requests=requests,
+        )
+        self.framework.observe(
+            self.database.on.cluster1_resource_created, self._on_cluster1_resource_created
+        )
+        self.framework.observe(
+            self.database.on.cluster2_resource_created, self._on_cluster2_resource_created
+        )
+
+    def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster1
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+
+    def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster2
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+``` + +### Provider Charm + +Following an example of using the ResourceRequestedEvent, in the context of the +database charm code: + +```python +from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + # Charm events defined in the database provides charm library. + self.provided_database = DatabaseProvides(self, relation_name="database") + self.framework.observe(self.provided_database.on.database_requested, + self._on_database_requested) + # Database generic helper + self.database = DatabaseHelper() + + def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: + # Handle the event triggered by a new database requested in the relation + # Retrieve the database name using the charm library. + db_name = event.database + # generate a new user credential + username = self.database.generate_user() + password = self.database.generate_password() + # set the credentials for the relation + self.provided_database.set_credentials(event.relation.id, username, password) + # set other variables for the relation event.set_tls("False") +``` + +As shown above, the library provides a custom event (database_requested) to handle +the situation when an application charm requests a new database to be created. +It's preferred to subscribe to this event instead of relation changed event to avoid +creating a new database when other information other than a database name is +exchanged in the relation databag. 
+ +""" + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections import namedtuple +from datetime import datetime +from enum import Enum +from typing import ( + Annotated, + Any, + ClassVar, + Generic, + Literal, + NewType, + TypeAlias, + TypeVar, + final, + overload, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SecretStr, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) +from pydantic.types import _SecretBase, _SecretField +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeAliasType, override + +try: + import psycopg2 +except ImportError: + psycopg2 = None + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 0 + +PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + +RESOURCE_ALIASES = [ + "database", + "subject", + "topic", + "index", + "plugin-url", +] + +SECRET_PREFIX = "secret-" + + 
+############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + + +def gen_salt() -> str: + """Generates a consistent salt.""" + return "".join(random.choices(string.ascii_letters + string.digits, k=16)) + + +def gen_hash(resource_name: str, salt: str) -> str: + """Generates a consistent hash based on the resource name and salt.""" + hasher = hashlib.sha256() + hasher.update(f"{resource_name}:{salt}".encode()) + return hasher.hexdigest()[:16] + + +def ensure_leader_for_app(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self._local_app and not self._local_unit.is_leader(): + logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def get_encoded_dict( + relation: Relation, member: Unit | Application, field: str +) -> dict[str, Any] | None: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + 
logger.error("Unexpected datatype for %s instead of dict.", str(data)) + + +Diff = namedtuple("Diff", ["added", "changed", "deleted"]) +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. + +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + + +def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: + """Retrieves the diff of the data in the relation changed databag for v1. + + Args: + old_data: dictionary of the stored data before the event. + new_data: dictionary of the received data to compute the diff. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + old_data = old_data or {} + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + # Return the diff with all possible changes. 
+ return Diff(added, changed, deleted) + + +def resource_added(diff: Diff) -> bool: + """Ensures that one of the aliased resources has been added.""" + return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) + + +def store_new_data( + relation: Relation, + component: Unit | Application, + new_data: dict[str, str], + short_uuid: str | None = None, +): + """Stores the new data in the databag for diff computation.""" + # First, the case for V0 + if not short_uuid: + relation.data[component].update({"data": json.dumps(new_data)}) + # Then the case for V1, where we have a ShortUUID + else: + data = json.loads(relation.data[component].get("data", "{}")) + if not isinstance(data, dict): + raise ValueError + newest_data = copy.deepcopy(data) + newest_data[short_uuid] = new_data + relation.data[component].update({"data": json.dumps(newest_data)}) + + +############################################################################## +# Helper classes +############################################################################## + +SecretGroup = NewType("SecretGroup", str) + + +SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) + + +class SecretBool(_SecretField[bool]): + """Class for booleans as secrets.""" + + _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() + _error_kind: ClassVar[str] = "bool_type" + + def _display(self) -> str: + return "****" + + +OptionalSecretStr: TypeAlias = SecretStr | None +OptionalSecretBool: TypeAlias = SecretBool | None + +OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) + +UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] +TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] +TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] +MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] +ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] +EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] + + +class Scope(Enum): + """Peer relations scope.""" + + APP = "app" + UNIT = "unit" + + +class CachedSecret: + """Locally cache a secret. + + The data structure is precisely reusing/simulating as in the actual Secret Storage + """ + + KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + + def __init__( + self, + model: Model, + component: Application | Unit, + label: str, + secret_uri: str | None = None, + ): + self._secret_meta = None + self._secret_content = {} + self._secret_uri = secret_uri + self.label = label + self._model = model + self.component = component + self.current_label = None + + @property + def meta(self) -> Secret | None: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + logger.info(f"Secret with label {self.label} not found") + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + + ########################################################################## + # Public functions + ########################################################################## + + def add_secret( + self, + content: dict[str, str], + relation: Relation | None = None, + label: str | None = None, + ) -> Secret: + """Create a new secret.""" + if self._secret_uri: + raise SecretAlreadyExistsError( + "Secret is already defined with uri %s", self._secret_uri + ) + + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if 
relation and relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) + self._secret_uri = secret.id + self._secret_meta = secret + return self._secret_meta + + def get_content(self) -> dict[str, str]: + """Getting cached secret content.""" + if not self._secret_content: + if self.meta: + try: + self._secret_content = self.meta.get_content(refresh=True) + except (ValueError, ModelError) as err: + # https://bugs.launchpad.net/juju/+bug/2042596 + # Only triggered when 'refresh' is set + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + ): + raise + # Due to: ValueError: Secret owner cannot use refresh=True + self._secret_content = self.meta.get_content() + return self._secret_content + + def set_content(self, content: dict[str, str]) -> None: + """Setting cached secret content.""" + if not self.meta: + return + + if content == self.get_content(): + return + + if content: + self.meta.set_content(content) + self._secret_content = content + else: + self.meta.remove_all_revisions() + + def get_info(self) -> SecretInfo | None: + """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" + if self.meta: + return self.meta.get_info() + + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + + +class SecretCache: + """A data structure storing CachedSecret objects.""" + + def __init__(self, model: Model, component: Application | Unit): + self._model = model + self.component = component + self._secrets: dict[str, CachedSecret] = {} + + def get(self, label: str, uri: str | None = None) -> CachedSecret | None: + """Getting a secret from Juju Secret store or cache.""" + if 
not self._secrets.get(label):
            secret = CachedSecret(self._model, self.component, label, uri)
            # Only cache the entry if the secret actually resolves in Juju.
            if secret.meta:
                self._secrets[label] = secret
        return self._secrets.get(label)

    def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret:
        """Adding a secret to Juju Secret.

        Raises:
            SecretAlreadyExistsError: if a secret with this label is already cached.
        """
        if self._secrets.get(label):
            raise SecretAlreadyExistsError(f"Secret {label} already exists")

        secret = CachedSecret(self._model, self.component, label)
        secret.add_secret(content, relation)
        self._secrets[label] = secret
        return self._secrets[label]

    def remove(self, label: str) -> None:
        """Remove a secret from the cache."""
        if secret := self.get(label):
            try:
                secret.remove()
                self._secrets.pop(label)
            except (SecretsUnavailableError, KeyError):
                # Best-effort removal: fall through to the debug log below.
                pass
            else:
                return
        logging.debug("Non-existing Juju Secret was attempted to be removed %s", label)


##############################################################################
# Models classes
##############################################################################


class PeerModel(BaseModel):
    """Common Model for all peer relations."""

    # Databag keys use dashes while Python attributes use underscores,
    # hence the alias generator.
    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        for field, field_info in self.__pydantic_fields__.items():
            # Secret-backed fields are Annotated types whose single metadata
            # entry is the secret group name.
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                # Peer secrets are looked up by label only (no URI in the databag).
                secret = repository.get_secret(secret_group, secret_uri=None)

                if not secret:
                    logger.info(f"No secret for group {secret_group}")
                    continue

                value = secret.get_content().get(aliased_field)

                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)
                setattr(self, field, value)

        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                value = getattr(self, field)

                # Unwrap Secret* wrappers; non-str values get JSON-encoded.
                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    if value:
                        secret = repository.add_secret(
                            aliased_field,
                            actual_value,
                            secret_group,
                        )
                        # NOTE(review): nesting of this guard was reconstructed
                        # from a collapsed patch; confirm it sits under `if value`.
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    # A None field means the key must be dropped from the secret.
                    full_content.pop(aliased_field, None)
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)
        return handler(self)
class CommonModel(BaseModel):
    """Common Model for both requirer and provider.

    request_id stores the request identifier for easier access.
    resource is the requested resource.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    # `resource` accepts legacy aliases (database/topic/...) for v0 compatibility.
    resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="")
    request_id: str | None = Field(default=None)
    salt: str = Field(
        description="This salt is used to create unique hashes even when other fields map 1-1",
        default_factory=gen_salt,
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        # Per-request discriminator used in secret labels.
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                # Name of the databag field carrying the secret URI, e.g. "secret_user".
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)

                if not secret_uri:
                    continue

                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                if not secret:
                    logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}")
                    continue

                value = secret.get_content().get(aliased_field)
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)

                setattr(self, field, value)
        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        # Secrets emptied during this pass; cleaned up at the end.
        _encountered_secrets: set[tuple[CachedSecret, str]] = set()
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        # Backward compatibility for v0 regarding secrets.
        if info.context.get("version") == "v0":
            short_uuid = None

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)
                aliased_field = field_info.serialization_alias or field
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)
                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                value = getattr(self, field)

                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    if value:
                        secret = repository.add_secret(
                            aliased_field, actual_value, secret_group, short_uuid
                        )
                        # NOTE(review): nesting reconstructed from a collapsed
                        # patch; `secret.meta.id` would fail for falsy values if
                        # these lines sat one level out — confirm.
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                        # Store the new secret URI back into the databag field.
                        setattr(self, secret_field, secret.meta.id)
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    full_content.pop(aliased_field, None)
                    _encountered_secrets.add((secret, secret_field))
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)

        # Delete all empty secrets and clean up their fields.
        for secret, secret_field in _encountered_secrets:
            if not secret.get_content():
                # Setting a field to '' deletes it
                setattr(self, secret_field, "")
                repository.delete_secret(secret.label)

        return handler(self)

    @classmethod
    def _get_secret_field(cls, field: str) -> SecretGroup | None:
        """Checks if the field is a secret uri or not."""
        if not field.startswith(SECRET_PREFIX):
            return None

        # Field shape is "secret-<group>"; extract the group part.
        value = field.split("-")[1]
        if info := cls.__pydantic_fields__.get(field.replace("-", "_")):
            if info.annotation == SecretString:
                return SecretGroup(value)
        return None


class EntityPermissionModel(BaseModel):
    """Entity Permissions Model."""

    resource_name: str
    resource_type: str
    privileges: list


class RequirerCommonModel(CommonModel):
    """Requirer side of the request model.

    extra_user_roles is used to request more roles for that user.
    external_node_connectivity is used to indicate that the URI should be made for external clients when True
    """

    extra_user_roles: str | None = Field(default=None)
    extra_group_roles: str | None = Field(default=None)
    external_node_connectivity: bool = Field(default=False)
    entity_type: Literal["USER", "GROUP"] | None = Field(default=None)
    entity_permissions: list[EntityPermissionModel] | None = Field(default=None)
    secret_mtls: SecretString | None = Field(default=None)
    mtls_cert: MtlsSecretStr = Field(default=None)

    @model_validator(mode="after")
    def validate_fields(self):
        """Validates that no inconsistent request is being sent."""
        if self.entity_type and self.entity_type not in ["USER", "GROUP"]:
            raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") + + if self.entity_type == "USER" and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == "GROUP" and self.extra_user_roles: + raise ValueError("Inconsistent entity information. Use extra_group_roles instead") + + return self + + +class ProviderCommonModel(CommonModel): + """Serialized fields added to the databag. + + endpoints stores the endpoints exposed to that client. + secret_user is a secret URI mapping to the user credentials + secret_tls is a secret URI mapping to the TLS certificate + secret_extra is a secret URI for all additional secrets requested. + """ + + endpoints: str | None = Field(default=None) + read_only_endpoints: str | None = Field(default=None) + secret_user: SecretString | None = Field(default=None) + secret_tls: SecretString | None = Field(default=None) + secret_extra: SecretString | None = Field(default=None) + secret_entity: SecretString | None = Field(default=None) + + +class ResourceProviderModel(ProviderCommonModel): + """Extended model including the deserialized fields.""" + + username: UserSecretStr = Field(default=None) + password: UserSecretStr = Field(default=None) + uris: UserSecretStr = Field(default=None) + read_only_uris: UserSecretStr = Field(default=None) + tls: TlsSecretBool = Field(default=None) + tls_ca: TlsSecretStr = Field(default=None) + entity_name: EntitySecretStr = Field(default=None) + entity_password: EntitySecretStr = Field(default=None) + version: str | None = Field(default=None) + + +class RequirerDataContractV0(RequirerCommonModel): + """Backward compatibility.""" + + version: Literal["v0"] = Field(default="v0") + + original_field: str = Field(exclude=True, default="") + + @model_validator(mode="before") + @classmethod + def ensure_original_field(cls, data: Any): + """Ensures that we keep the original field.""" + if isinstance(data, dict): + for alias in RESOURCE_ALIASES: + if data.get(alias) 
is not None: + data["original_field"] = alias + break + else: + for alias in RESOURCE_ALIASES: + if getattr(data, alias) is not None: + data.original_field = alias + return data + + +TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) +TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) + + +class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): + """The new Data Contract.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TRequirerCommonModel] + + +def discriminate_on_version(payload: Any) -> str: + """Use the version to discriminate.""" + if isinstance(payload, dict): + return payload.get("version", "v0") + return getattr(payload, "version", "v0") + + +RequirerDataContractType = Annotated[ + Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], + Discriminator(discriminate_on_version), +] + + +RequirerDataContract = TypeAdapter(RequirerDataContractType) + + +class DataContractV0(ResourceProviderModel): + """The Data contract of the response, for V0.""" + + +class DataContractV1(BaseModel, Generic[TResourceProviderModel]): + """The Data contract of the response, for V1.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TResourceProviderModel] = Field(default_factory=list) + + +DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) + + +TCommonModel = TypeVar("TCommonModel", bound=CommonModel) + + +def is_topic_value_acceptable(value: str | None) -> str | None: + """Check whether the given Kafka topic value is acceptable.""" + if value and "*" in value[:3]: + raise ValueError(f"Error on topic '{value}',, unacceptable value.") + return value + + +class KafkaRequestModel(RequirerCommonModel): + """Specialised model for Kafka.""" + + consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( + Field(default=None) + ) + + +class KafkaResponseModel(ResourceProviderModel): 
+ """Kafka response model.""" + + consumer_group_prefix: ExtraSecretStr = Field(default=None) + zookeeper_uris: ExtraSecretStr = Field(default=None) + + +############################################################################## +# AbstractRepository class +############################################################################## + + +class AbstractRepository(ABC): + """Abstract repository interface.""" + + @abstractmethod + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + ... + + @abstractmethod + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def get_field(self, field: str) -> str | None: + """Gets the value for one field.""" + ... + + @abstractmethod + def get_fields(self, *fields: str) -> dict[str, str | None]: + """Gets the values for all provided fields.""" + ... + + @abstractmethod + def write_field(self, field: str, value: Any) -> None: + """Writes the value in the field, without any secret support.""" + ... + + @abstractmethod + def write_fields(self, mapping: dict[str, Any]) -> None: + """Writes the values of mapping in the fields without any secret support (keys of mapping).""" + ... + + def write_secret_field( + self, field: str, value: Any, group: SecretGroup + ) -> CachedSecret | None: + """Writes a secret field.""" + ... + + @abstractmethod + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def delete_secret(self, label: str): + """Deletes a secret by its label.""" + ... + + @abstractmethod + def delete_field(self, field: str) -> None: + """Deletes a field.""" + ... 
+ + @abstractmethod + def delete_fields(self, *fields: str) -> None: + """Deletes all the provided fields.""" + ... + + @abstractmethod + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + """Delete a field stored in a secret group.""" + ... + + @abstractmethod + def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: + """Registers a secret using the repository.""" + ... + + @abstractmethod + def get_data(self) -> dict[str, Any] | None: + """Gets the whole data.""" + ... + + @abstractmethod + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Builds a secret field.""" + + +class OpsRepository(AbstractRepository): + """Implementation for ops repositories, with some methods left out.""" + + SECRET_FIELD_NAME: str + + IGNORES_GROUPS: list[SecretGroup] = [] + + uri_to_databag: bool = True + + def __init__( + self, + model: Model, + relation: Relation | None, + component: Unit | Application, + ): + self._local_app = model.app + self._local_unit = model.unit + self.relation = relation + self.component = component + self.model = model + self.secrets = SecretCache(model, component) + + @abstractmethod + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group mapping for secrets within a relation context.""" + ... 
+ + @override + def get_data(self) -> dict[str, Any] | None: + ret: dict[str, Any] = {} + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + for key, value in self.relation.data[self.component].items(): + try: + ret[key] = json.loads(value) + except json.JSONDecodeError: + ret[key] = value + + return ret + + @override + @ensure_leader_for_app + def get_field( + self, + field: str, + ) -> str | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + return relation_data.get(field) + + @override + @ensure_leader_for_app + def get_fields(self, *fields: str) -> dict[str, str]: + res = {} + for field in fields: + if (value := self.get_field(field)) is not None: + res[field] = value + return res + + @override + @ensure_leader_for_app + def write_field(self, field: str, value: Any) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if not value: + return None + self.relation.data[self.component].update({field: value}) + + @override + @ensure_leader_for_app + def write_fields(self, mapping: dict[str, Any]) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + (self.write_field(field, value) for field, value in mapping.items()) + + @override + @ensure_leader_for_app + def write_secret_field( + self, field: str, value: Any, 
secret_group: SecretGroup + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = self.get_field(self.secret_field(secret_group, field)) + + secret = self.secrets.get(label=label, uri=secret_uri) + if not secret: + return self.add_secret(field, value, secret_group) + else: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({field: value}) + secret.set_content(full_content) + return secret + + @override + @ensure_leader_for_app + def delete_field(self, field: str) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + try: + relation_data.pop(field) + except KeyError: + logger.debug( + f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" + ) + + @override + @ensure_leader_for_app + def delete_fields(self, *fields: str) -> None: + (self.delete_field(field) for field in fields) + + @override + @ensure_leader_for_app + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + relation_data = self.relation.data[self.component] + secret_field = self.secret_field(secret_group, field) + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = relation_data.get(secret_field) + + secret = 
self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f"from relation {self.relation.id} and group {secret_group}" + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. 
+ """ + if not self.relation: + raise ValueError("Cannot register without relation.") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> Any | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + @override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + return secret + + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + + +@final +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = "secret" + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" + return f"{relation.name}.{relation.id}.{secret_group}.secret" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f"{self.SECRET_FIELD_NAME}-{secret_group}" + + +class 
OpsPeerRepository(OpsRepository): + """Implementation of the Ops Repository for peer relations.""" + + SECRET_FIELD_NAME = "internal_secret" + + IGNORES_GROUPS = [ + SecretGroup("user"), + SecretGroup("entity"), + SecretGroup("mtls"), + SecretGroup("tls"), + ] + + uri_to_databag: bool = False + + @property + def scope(self) -> Scope: + """Returns a scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + raise ValueError("Invalid component, neither a Unit nor an Application") + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + members = [relation.name, self._local_app.name, self.scope.value] + + if secret_group != SecretGroup("extra"): + members.append(secret_group) + return f"{'.'.join(members)}" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + if not field: + raise ValueError("Must have a field.") + return f"{field}@{secret_group}" + + +@final +class OpsPeerUnitRepository(OpsPeerRepository): + """Implementation for a unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + super().__init__(model, relation, component) + + +@final +class OpsOtherPeerUnitRepository(OpsPeerRepository): + """Implementation for a remote unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + if component == model.unit: + raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.") + super().__init__(model, relation, component) + + @override + def write_field(self, field: str, value: Any) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def write_fields(self, mapping: 
dict[str, Any]) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def add_secret( + self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None + ) -> CachedSecret | None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_field(self, field: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_fields(self, *fields: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + +TRepository = TypeVar("TRepository", bound=OpsRepository) +TCommon = TypeVar("TCommon", bound=BaseModel) +TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel) +TCommonBis = TypeVar("TCommonBis", bound=BaseModel) + + +class RepositoryInterface(Generic[TRepository, TCommon]): + """Repository builder.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + component: Unit | Application, + repository_type: type[TRepository], + model: type[TCommon] | TypeAdapter | None, + ): + self.charm = charm + self._model = charm.model + self.repository_type = repository_type + self.relation_name = relation_name + self.model = model + self.component = component + + @property + def relations(self) -> list[Relation]: + """The list of Relation instances associated with this relation name.""" + return self._model.relations[self.relation_name] + + def repository( + self, relation_id: int, component: Unit | Application | None = None + ) -> TRepository: + """Returns a repository for the relation.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return self.repository_type(self._model, relation, 
component or self.component)

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommon],
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: TypeAdapter[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: None = None,
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    def build_model(
        self,
        relation_id: int,
        model: type[TCommon] | TypeAdapter[TCommonBis] | None = None,
        component: Unit | Application | None = None,
    ) -> TCommon | TCommonBis:
        """Builds a model using the repository for that relation.

        Raises:
            ValueError: if no model is available or the relation is missing.
        """
        model = model or self.model  # First the provided model (allows for specialisation)
        component = component or self.component
        if not model:
            raise ValueError("Missing model to specialise data")
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return build_model(self.repository_type(self._model, relation, component), model)

    def write_model(
        self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None
    ):
        """Writes the model using the repository.

        Raises:
            ValueError: if the relation is missing.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")

        write_model(
            self.repository_type(self._model, relation, self.component), model, context=context
        )


class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]):
    """Specialised Interface to build repositories for app peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model)


class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]):
    """Specialised Interface to build repositories for app peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model)


class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]):
    """Specialised Interface to build repositories for this unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model)


class OpsOtherPeerUnitRepositoryInterface(
    RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon]
):
    """Specialised Interface to build repositories for another unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        unit: Unit,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model)


##############################################################################
# DDD implementation methods
##############################################################################
##############################################################################


def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon:
    """Builds a common model using the provided repository and provided model structure."""
    data = repository.get_data() or {}

    # Legacy "data" mirror field is never part of the model.
    data.pop("data", None)

    # Beware this means all fields should have a default value here.
    if isinstance(model, TypeAdapter):
        return model.validate_python(data, context={"repository": repository})

    return model.model_validate(data, context={"repository": repository})


def write_model(
    repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None
):
    """Writes the data stored in the model using the repository object."""
    context = context or {}
    dumped = model.model_dump(
        mode="json", context={"repository": repository} | context, exclude_none=False
    )
    for field, value in dumped.items():
        if value is None:
            # None means "remove this field from the databag".
            repository.delete_field(field)
            continue
        dumped_value = value if isinstance(value, str) else json.dumps(value)
        repository.write_field(field, dumped_value)


##############################################################################
# Custom Events
##############################################################################


class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]):
    """Resource requested event.

    Contains the request that should be handled.

    fields to serialize: relation, app, unit, request
    """

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        request: TRequirerCommonModel,
    ):
        super().__init__(handle)
        self.relation = relation
        self.app = app
        self.unit = unit
        self.request = request

    def snapshot(self) -> dict[str, Any]:
        """Save the event information."""
        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
        if self.app:
            snapshot["app_name"] = self.app.name
        if self.unit:
            snapshot["unit_name"] = self.unit.name
        # The models are too complex and would be blocked by marshal so we pickle dump the model.
        # The full dictionary is pickled afterwards anyway.
+        snapshot["request"] = pickle.dumps(self.request)
+        return snapshot
+
+    def restore(self, snapshot: dict[str, Any]):
+        """Restore event information."""
+        relation = self.framework.model.get_relation(
+            snapshot["relation_name"], snapshot["relation_id"]
+        )
+        if not relation:
+            raise ValueError("Missing relation")
+        self.relation = relation
+        self.app = None
+        app_name = snapshot.get("app_name")
+        if app_name:
+            self.app = self.framework.model.get_app(app_name)
+        self.unit = None
+        unit_name = snapshot.get("unit_name")
+        if unit_name:
+            self.unit = self.framework.model.get_unit(unit_name)
+        self.request = pickle.loads(snapshot["request"])
+
+
+class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """Resource requested event."""
+
+    pass
+
+
+class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """Resource Entity requested event."""
+
+    pass
+
+
+class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """Resource entity permissions changed event."""
+
+    pass
+
+
+class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """MTLS certificate updated event."""
+
+    def __init__(
+        self,
+        handle: Handle,
+        relation: Relation,
+        app: Application | None,
+        unit: Unit | None,
+        request: TRequirerCommonModel,
+        old_mtls_cert: str | None = None,
+    ):
+        super().__init__(handle, relation, app, unit, request)
+
+        self.old_mtls_cert = old_mtls_cert
+
+    def snapshot(self):
+        """Return a snapshot of the event."""
+        return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert}
+
+    def restore(self, snapshot):
+        """Restore the event from a snapshot."""
+        super().restore(snapshot)
+        self.old_mtls_cert = snapshot["old_mtls_cert"]
+
+
+class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]):
+    """Resource requested event.
+
+    Contains the request that should be handled.
+
+    fields to serialize: relation, app, unit, request
+    """
+
+    def __init__(
+        self,
+        handle: Handle,
+        relation: Relation,
+        app: Application | None,
+        unit: Unit | None,
+        requests: list[TRequirerCommonModel],
+    ):
+        super().__init__(handle)
+        self.relation = relation
+        self.app = app
+        self.unit = unit
+        self.requests = requests
+
+    def snapshot(self) -> dict[str, Any]:
+        """Save the event information."""
+        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
+        if self.app:
+            snapshot["app_name"] = self.app.name
+        if self.unit:
+            snapshot["unit_name"] = self.unit.name
+        # The models are too complex and would be blocked by marshal so we pickle dump the model.
+        # The full dictionary is pickled afterwards anyway.
+        snapshot["requests"] = [pickle.dumps(request) for request in self.requests]
+        return snapshot
+
+    def restore(self, snapshot: dict[str, Any]):
+        """Restore event information."""
+        relation = self.framework.model.get_relation(
+            snapshot["relation_name"], snapshot["relation_id"]
+        )
+        if not relation:
+            raise ValueError("Missing relation")
+        self.relation = relation
+        self.app = None
+        app_name = snapshot.get("app_name")
+        if app_name:
+            self.app = self.framework.model.get_app(app_name)
+        self.unit = None
+        unit_name = snapshot.get("unit_name")
+        if unit_name:
+            self.unit = self.framework.model.get_unit(unit_name)
+        self.requests = [pickle.loads(request) for request in snapshot["requests"]]
+
+
+class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]):
+    """Database events.
+
+    This class defines the events that the database can emit.
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+        snapshot["response"] = pickle.dumps(self.response)
+        return snapshot
+
+    def restore(self, snapshot: dict):
+        """Restore event information."""
+        relation = self.framework.model.get_relation(
+            snapshot["relation_name"], snapshot["relation_id"]
+        )
+        if not relation:
+            raise ValueError("Missing relation")
+        self.relation = relation
+        self.app = None
+        app_name = snapshot.get("app_name")
+        if app_name:
+            self.app = self.framework.model.get_app(app_name)
+        self.unit = None
+        unit_name = snapshot.get("unit_name")
+        if unit_name:
+            self.unit = self.framework.model.get_unit(unit_name)
+
+        self.response = pickle.loads(snapshot["response"])
+
+
+class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Resource has been created."""
+
+    pass
+
+
+class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Resource entity has been created."""
+
+    pass
+
+
+class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Read/Write endpoints are changed."""
+
+    pass
+
+
+class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Read-only endpoints are changed."""
+
+    pass
+
+
+class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Authentication was updated for a user."""
+
+    pass
+
+
+class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]):
+    """Database events.
+
+    This class defines the events that the database can emit.
+ """ + + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _handle_event( + self, + ): + """Handles 
the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, "data") + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode="json", + exclude={"data"}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data(relation, self.component, new_data, short_uuid=request.request_id) + + return _diff + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = "", + mtls_enabled: bool = False, + bulk_event: bool = False, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. + bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in _diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): + if self.mtls_enabled and "secret-mtls" in _diff.added: + getattr(self.on, "mtls_cert_updated").emit( + event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None + ) + return + # Emit a resource requested event if the setup key (resource name) + # was added to the relation databag, but the entity-type key was not. + if resource_added(_diff) and "entity-type" not in _diff.added: + getattr(self.on, "resource_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (resource name) + # was added to the relation databag, in addition to the entity-type key. + if resource_added(_diff) and "entity-type" in _diff.added: + getattr(self.on, "resource_entity_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (resource name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + not resource_added(_diff) + and "entity-type" not in _diff.added + and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) + ): + getattr(self.on, "resource_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit, request=request + ) + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request: RequirerCommonModel, + ): + _diff = self.compute_diff(event.relation, request, repository) + + self._validate_diff(event, _diff) + self._dispatch_events(event, _diff, request) + + def _handle_bulk_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request_model: RequirerDataContractV1[TRequirerCommonModel], + ): + """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. + + This allows for the developer to process the diff and store it themselves + """ + for request in request_model.requests: + # Compute the diff without storing it so we can validate the diffs. + _diff = self.compute_diff(event.relation, request, repository, store=False) + self._validate_diff(event, _diff) + + getattr(self.on, "bulk_resources_requested").emit( + event.relation, app=event.app, unit=event.unit, requests=request_model.requests + ) + + # Store all the diffs if they were not already stored. 
+ for request in request_model.requests: + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info("MTLS is disabled, exiting early.") + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + logger.info("mtls-cert-updated") + + # V0, just fire the event. + if version == "v0": + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. 
+        else:
+            request_model = build_model(repository, RequirerDataContractV1[self.request_model])
+            if not short_uuid:
+                return
+            for _request in request_model.requests:
+                if _request.request_id == short_uuid:
+                    request = _request
+                    break
+            else:
+                logger.info(f"Unknown request id {short_uuid}")
+                return
+
+        getattr(self.on, "mtls_cert_updated").emit(
+            relation,
+            app=relation.app,
+            unit=remote_unit,
+            request=request,
+            old_mtls_cert=old_mtls_cert,
+        )
+
+    @override
+    def _on_relation_changed_event(self, event: RelationChangedEvent):
+        if not self.charm.unit.is_leader():
+            return
+
+        repository = OpsRelationRepository(
+            self.model, event.relation, component=event.relation.app
+        )
+
+        # Don't do anything until we get some data
+        if not repository.get_data():
+            return
+
+        version = repository.get_field("version") or "v0"
+        if version == "v0":
+            request_model = build_model(repository, RequirerDataContractV0)
+            old_name = request_model.original_field
+            request_model.request_id = None  # For safety, let's ensure that we don't have a model.
+            self._handle_event(event, repository, request_model)
+            logger.info(
+                f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'"
+            )
+            self.interface.repository(
+                event.relation.id,
+            ).write_field(old_name, request_model.resource)
+        else:
+            request_model = build_model(repository, RequirerDataContractV1[self.request_model])
+            if self.bulk_event:
+                self._handle_bulk_event(event, repository, request_model)
+                return
+            for request in request_model.requests:
+                self._handle_event(event, repository, request)
+
+    def set_response(self, relation_id: int, response: ResourceProviderModel):
+        r"""Sets a response in the databag.
+
+        This function will react accordingly to the version number.
+        If the version number is v0, then we write the data directly in the databag.
+        If the version number is v1, then we write the data in the list of responses.
+
+        /!\ This function updates a response if it was already present in the databag!
+ + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. + for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index] = response + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = "", + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + ) + + if 
requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" + ) + + # Created custom event names for each alias. + if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f"{relation_alias}_resource_created", + ResourceCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_resource_entity_created", + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). 
+        """
+        if not model:
+            relation = self.model.get_relation(self.relation_name, rel_id)
+            if not relation:
+                return False
+            model = self.interface.build_model(relation_id=rel_id, component=relation.app)
+        for request in model.requests:
+            if request.request_id == request_id:
+                return request.secret_user is not None or request.secret_entity is not None
+        return False
+
+    def are_all_resources_created(self, rel_id: int) -> bool:
+        """Checks that all resources have been created for a relation.
+
+        Args:
+            rel_id: The relation id to check.
+        """
+        relation = self.model.get_relation(self.relation_name, rel_id)
+        if not relation:
+            return False
+        model = self.interface.build_model(relation_id=rel_id, component=relation.app)
+        return all(
+            self.is_resource_created(rel_id, request.request_id, model)
+            for request in model.requests
+            if request.request_id
+        )
+
+    @staticmethod
+    def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool:
+        # Actual checking method.
+        # No need to check for psycopg here, it's been checked before.
+        if not psycopg2:
+            return False
+
+        try:
+            with psycopg2.connect(connection_string) as connection:
+                with connection.cursor() as cursor:
+                    cursor.execute(
+                        "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,)
+                    )
+                    return cursor.fetchone() is not None
+        except psycopg2.Error as e:
+            logger.exception(
+                "failed to check whether %s plugin is enabled in the database: %s",
+                plugin, str(e),
+            )
+            return False
+
+    def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool:
+        """Returns whether a plugin is enabled in the database.
+
+        Args:
+            plugin: name of the plugin to check.
+            relation_index: Optional index to check the database (default: 0 - first relation).
+        """
+        if not psycopg2:
+            return False
+
+        # Can't check a non existing relation.
+ if len(self.relations) <= relation_index: + return False + + relation = self.relations[relation_index] + model = self.interface.build_model(relation_id=relation.id, component=relation.app) + for request in model.requests: + if request.endpoints and request.username and request.password: + host = request.endpoints.split(":")[0] + username = request.username.get_secret_value() + password = request.password.get_secret_value() + + connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" + return self._is_pg_plugin_enabled(plugin, connection_string) + logger.info("No valid request to use to check for plugin.") + return False + + ############################################################################## + # Helpers for aliases + ############################################################################## + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation and relation.data[self.charm.unit].get("alias"): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). + available_aliases = self.relation_aliases[:] + for relation in self.charm.model.relations[self.relation_name]: + alias = relation.data[self.charm.unit].get("alias") + if alias: + logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) + available_aliases.remove(alias) + + # Set the alias in the unit relation databag of the specific relation. 
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({"alias": available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get("alias") + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = 
unit + break + + response_model = self.interface.build_model(relation.id) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "authentication_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. + for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version="v1", requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info("Still waiting for data.") + 
return + + data = repository.get_field("data") + if not data: + logger.info("Missing data to compute diffs") + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f"No request matching the response with response_id {response_id}" + ) + self._handle_event(event, repository, request, response) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace("-", "_")) + repository.register_secret(uri, secret_group, response.request_id) + + if "secret-user" in _diff.added and not request.entity_type: + logger.info(f"resource {response.resource} created at {datetime.now()}") + getattr(self.on, "resource_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_created", response) + return + + if "secret-entity" in _diff.added and request.entity_type: + logger.info(f"entity {response.entity_name} created at {datetime.now()}") + getattr(self.on, "resource_entity_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_entity_created", response) + return + + if "endpoints" in _diff.added or "endpoints" in _diff.changed: + logger.info(f"endpoints 
changed at {datetime.now()}") + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "endpoints_changed", response) + return + + if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: + logger.info(f"read-only-endpoints changed at {datetime.now()}") + getattr(self.on, "read_only_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "read_only_endpoints_changed", response) + return diff --git a/tests/v1/integration/application-charm/poetry.lock b/tests/v1/integration/application-charm/poetry.lock index b6f8b787..e15e1d29 100644 --- a/tests/v1/integration/application-charm/poetry.lock +++ b/tests/v1/integration/application-charm/poetry.lock @@ -38,14 +38,14 @@ type = ["pytest-mypy"] [[package]] name = "opentelemetry-api" -version = "1.36.0" +version = "1.37.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.9" groups = ["main", "charm-libs"] files = [ - {file = "opentelemetry_api-1.36.0-py3-none-any.whl", hash = "sha256:02f20bcacf666e1333b6b1f04e647dc1d5111f86b8e510238fcc56d7762cda8c"}, - {file = "opentelemetry_api-1.36.0.tar.gz", hash = "sha256:9a72572b9c416d004d492cbc6e61962c0501eaf945ece9b5a0f56597d8348aa0"}, + {file = "opentelemetry_api-1.37.0-py3-none-any.whl", hash = "sha256:accf2024d3e89faec14302213bc39550ec0f4095d1cf5ca688e1bfb1c8612f47"}, + {file = "opentelemetry_api-1.37.0.tar.gz", hash = "sha256:540735b120355bd5112738ea53621f8d5edb35ebcd6fe21ada3ab1c61d1cd9a7"}, ] [package.dependencies] @@ -74,16 +74,36 @@ websocket-client = "==1.*" testing = ["ops-scenario (==7.23.1)"] tracing = ["ops-tracing (==2.23.1)"] +[[package]] +name = "psycopg2" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +groups = ["main", "charm-libs"] +files = [ + {file = 
"psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, +] + [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main", "charm-libs"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2"}, + {file = "pydantic-2.11.9.tar.gz", hash = 
"sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2"}, ] [package.dependencies] @@ -338,4 +358,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.10.12" -content-hash = "3e90339ce16375daef4d9549ee45a773e9691caf1f8e2141decd3454017036e5" +content-hash = "33b42b6706f332be97f99c3f84eda369f2a2ef185c09d5fd794a3daea17b55f3" diff --git a/tests/v1/integration/application-charm/src/charm.py b/tests/v1/integration/application-charm/src/charm.py index d6c4fded..b7167b7a 100755 --- a/tests/v1/integration/application-charm/src/charm.py +++ b/tests/v1/integration/application-charm/src/charm.py @@ -43,6 +43,8 @@ class ExtendedResponseModel(ResourceProviderModel): topsecret: ExtraSecretStr = Field(default=None) donttellanyone: ExtraSecretStr = Field(default=None) + new_field_req: str | None = Field(default=None) + new_field2_req: str | None = Field(default=None) class ApplicationCharm(CharmBase): @@ -53,7 +55,7 @@ def __init__(self, *args): # Default charm events. self.framework.observe(self.on.start, self._on_start) - # self.framework.observe(self.on.get_plugin_status_action, self._on_get_plugin_status) + self.framework.observe(self.on.get_plugin_status_action, self._on_get_plugin_status) # Events related to the first database that is requested # (these events are defined in the database requires charm library). @@ -68,7 +70,7 @@ def __init__(self, *args): ) self.first_database_roles = ResourceRequirerEventHandler( self, - "first_database-roles", + "first-database-roles", requests=[ RequirerCommonModel( resource=database_name, entity_type="USER", extra_user_roles=EXTRA_USER_ROLES @@ -258,10 +260,10 @@ def __init__(self, *args): self.framework.observe( self.opensearch.on.resource_created, self._on_opensearch_index_created ) - # TODO: investigate authentication updated. 
- # self.framework.observe( - # self.opensearch.on.authentication_updated, self._on_opensearch_authentication_updated - # ) + + self.framework.observe( + self.opensearch.on.authentication_updated, self._on_opensearch_authentication_updated + ) self.opensearch_roles = ResourceRequirerEventHandler( charm=self, @@ -416,17 +418,17 @@ def _on_cluster2_endpoints_changed(self, event: ResourceEndpointsChangedEvent) - """Event triggered when the read/write endpoints of the database change.""" logger.info(f"cluster2 endpoints have been changed to: {event.response.endpoints}") - # def _on_get_plugin_status(self, event: ActionEvent) -> None: - # """Returns the PostgreSQL plugin status (enabled/disabled).""" - # plugin = event.params.get("plugin") - # if not plugin: - # event.fail("Please provide a plugin name") - # return - - # plugin_status = ( - # "enabled" if self.first_database.is_postgresql_plugin_enabled(plugin) else "disabled" - # ) - # event.set_results({"plugin-status": plugin_status}) + def _on_get_plugin_status(self, event: ActionEvent) -> None: + """Returns the PostgreSQL plugin status (enabled/disabled).""" + plugin = event.params.get("plugin") + if not plugin: + event.fail("Please provide a plugin name") + return + + plugin_status = ( + "enabled" if self.first_database.is_postgresql_plugin_enabled(plugin) else "disabled" + ) + event.set_results({"plugin-status": plugin_status}) def _on_kafka_bootstrap_server_changed(self, event: ResourceEndpointsChangedEvent): """Event triggered when a bootstrap server was changed for this application.""" diff --git a/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml b/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml index b4317391..ad2c056e 100644 --- a/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml +++ b/tests/v1/integration/backward-compatibility-charm/charmcraft.yaml @@ -2,12 +2,11 @@ # See LICENSE file for licensing details. 
type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -59,14 +58,7 @@ parts: - libssl-dev # Needed to build Python dependencies with Rust from source - pkg-config # Needed to build Python dependencies with Rust from source override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu # archive—which means the rustup version could be updated at any time. Print rustup version diff --git a/tests/v1/integration/database-charm/charmcraft.yaml b/tests/v1/integration/database-charm/charmcraft.yaml index a07cac5e..cfa355bd 100644 --- a/tests/v1/integration/database-charm/charmcraft.yaml +++ b/tests/v1/integration/database-charm/charmcraft.yaml @@ -2,12 +2,11 @@ # See LICENSE file for licensing details. 
type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -60,14 +59,7 @@ parts: - pkg-config # Needed to build Python dependencies with Rust from source - libpq-dev override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu # archive—which means the rustup version could be updated at any time. 
Print rustup version diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index 15965a46..d152d3e0 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -21,7 +21,8 @@ from ops.framework import StoredState from ops.main import main from ops.model import ActiveStatus, MaintenanceStatus -from pydantic import Field, SecretStr, TypeAdapter +from pydantic import Field, SecretStr +from pydantic.types import _SecretBase from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, @@ -54,15 +55,23 @@ class PeerAppModel(PeerModel): secret_field: ExtraSecretStr mygroup_field1: MygroupSecretStr = Field(default=None) mygroup_field2: MygroupSecretStr = Field(default=None) + not_a_secret: str | None = Field(default=None) class PeerUnitModel(PeerModel): monitor_password: ExtraSecretStr secret_field: ExtraSecretStr my_unit_secret: ExtraSecretStr + not_a_secret: str | None = Field(default=None) -DataContract = TypeAdapter(DataContractV1[ResourceProviderModel]) +class ExtendedResourceProviderModel(ResourceProviderModel): + topsecret: ExtraSecretStr = Field(default=None) + new_field: str | None = Field(default=None) + new_field2: str | None = Field(default=None) + + +DataContract = DataContractV1[ExtendedResourceProviderModel] class DatabaseCharm(CharmBase): @@ -145,9 +154,7 @@ def peer_units_data_interfaces(self) -> dict[Unit, OpsOtherPeerUnitRepositoryInt for unit in self.peer_relation.units: if unit not in self._servers_data: self._servers_data[unit] = OpsOtherPeerUnitRepositoryInterface( - charm=self, - relation_name=PEER, - unit=unit, + charm=self, relation_name=PEER, unit=unit, model=PeerUnitModel ) return self._servers_data @@ -221,7 +228,7 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: cursor.execute(f"GRANT ALL PRIVILEGES ON DATABASE {resource} TO {username};") # Add the roles to the user. 
if extra_user_roles: - cursor.execute(f"ALTER USER {username} {' '.join(extra_user_roles)};") + cursor.execute(f"ALTER USER {username} {extra_user_roles};") # Get the database version. cursor.execute("SELECT version();") version = cursor.fetchone()[0] @@ -280,11 +287,11 @@ def _on_resource_entity_requested(self, event: ResourceEntityRequestedEvent) -> if entity_type == "user": extra_roles = request.extra_user_roles cursor.execute(f"CREATE ROLE {rolename} WITH ENCRYPTED PASSWORD '{password}';") - cursor.execute(f"ALTER ROLE {rolename} {' '.join(extra_roles)};") + cursor.execute(f"ALTER ROLE {rolename} {extra_roles};") if entity_type == "group": extra_roles = request.extra_group_roles cursor.execute(f"CREATE ROLE {rolename};") - cursor.execute(f"ALTER ROLE {rolename} {' '.join(extra_roles)};") + cursor.execute(f"ALTER ROLE {rolename} {extra_roles};") # Share the credentials with the application. response = ResourceProviderModel( @@ -307,32 +314,38 @@ def _on_get_relation_field(self, event: ActionEvent): """[second_database]: Get requested relation field.""" relation = self._get_relation(event.params["relation_id"]) value = None - repository = self.database.interface.repository(relation.id, component=relation.app) - - repository.get_field(event.params["field"]) + model = self.database.interface.build_model(relation.id) + for request in model.requests: + value = getattr(request, event.params["field"].replace("-", "_")) + value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_get_relation_self_side_field(self, event: ActionEvent): """[second_database]: Get requested relation field.""" relation = self._get_relation(event.params["relation_id"]) value = None - repository = self.database.interface.repository(relation.id) - - repository.get_field(event.params["field"]) + model = self.database.interface.build_model(relation.id) + for request in model.requests: + value = 
getattr(request, event.params["field"].replace("-", "_"))
+            value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
         event.set_results({"value": value if value else ""})
 
     def _on_set_relation_field(self, event: ActionEvent):
         """Set requested relation field."""
         relation = self._get_relation(event.params["relation_id"])
-        repository = self.database.interface.repository(relation.id)
-        repository.write_field(event.params["field"], event.params["value"])
+        model = self.database.interface.build_model(relation.id)
+        for request in model.requests:
+            setattr(request, event.params["field"].replace("-", "_"), event.params["value"])
+        self.database.interface.write_model(relation.id, model)
 
     def _on_delete_relation_field(self, event: ActionEvent):
         """Delete requested relation field."""
         relation = self._get_relation(event.params["relation_id"])
-        repository = self.database.interface.repository(relation.id)
-        repository.delete_field(event.params["field"])
+        model = self.database.interface.build_model(relation.id)
+        for request in model.requests:
+            setattr(request, event.params["field"].replace("-", "_"), None)  # Old-version compatibility: writing None simulates deleting the field
+        self.database.interface.write_model(relation.id, model)
 
     def _new_rolename(self) -> str:
         """Generate a random rolename string. 
@@ -362,11 +375,12 @@ def _on_get_peer_relation_field(self, event: ActionEvent): if component == "app": relation = self._peer_relation_app.relations[0] model = self._peer_relation_app.build_model(relation.id) - value = getattr(model, event.params["field"]) + value = getattr(model, event.params["field"].replace("-", "_")) else: relation = self._peer_relation_unit.relations[0] model = self._peer_relation_unit.build_model(relation.id) - value = getattr(model, event.params["field"]) + value = getattr(model, event.params["field"].replace("-", "_")) + value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_set_peer_relation_field(self, event: ActionEvent): @@ -375,12 +389,12 @@ def _on_set_peer_relation_field(self, event: ActionEvent): if component == "app": relation = self._peer_relation_app.relations[0] model = self._peer_relation_app.build_model(relation.id) - setattr(model, event.params["field"], event.params["value"]) + setattr(model, event.params["field"].replace("-", "_"), event.params["value"]) self._peer_relation_app.write_model(relation.id, model) else: relation = self._peer_relation_unit.relations[0] model = self._peer_relation_unit.build_model(relation.id) - setattr(model, event.params["field"], event.params["value"]) + setattr(model, event.params["field"].replace("-", "_"), event.params["value"]) self._peer_relation_unit.write_model(relation.id, model) def _on_set_peer_relation_field_multiple(self, event: ActionEvent): @@ -423,12 +437,12 @@ def _on_delete_peer_relation_field(self, event: ActionEvent): if component == "app": relation = self._peer_relation_app.relations[0] model = self._peer_relation_app.build_model(relation.id) - setattr(model, event.params["field"], None) + setattr(model, event.params["field"].replace("-", "_"), None) self._peer_relation_app.write_model(relation.id, model) else: relation = self._peer_relation_unit.relations[0] model = 
self._peer_relation_unit.build_model(relation.id) - setattr(model, event.params["field"], None) + setattr(model, event.params["field"].replace("-", "_"), None) self._peer_relation_unit.write_model(relation.id, model) # Other Peer Data @@ -440,9 +454,13 @@ def _on_get_other_peer_relation_field(self, event: ActionEvent): event.fail("Missing relation") return for unit, interface in self.peer_units_data_interfaces.items(): - value[unit.name.replace("/", "-")] = interface.repository(relation.id).get_field( - event.params["field"] + model = interface.build_model(relation.id) + value[unit.name.replace("/", "-")] = getattr( + model, event.params["field"].replace("-", "_") ) + for key, item in value.items(): + item = item.get_secret_value() if issubclass(item.__class__, _SecretBase) else item + value[key] = item event.set_results(value) # Remove peer secrets diff --git a/tests/v1/integration/dummy-database-charm/charmcraft.yaml b/tests/v1/integration/dummy-database-charm/charmcraft.yaml index a07cac5e..cfa355bd 100644 --- a/tests/v1/integration/dummy-database-charm/charmcraft.yaml +++ b/tests/v1/integration/dummy-database-charm/charmcraft.yaml @@ -2,12 +2,11 @@ # See LICENSE file for licensing details. 
type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -60,14 +59,7 @@ parts: - pkg-config # Needed to build Python dependencies with Rust from source - libpq-dev override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu # archive—which means the rustup version could be updated at any time. Print rustup version diff --git a/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py new file mode 100644 index 00000000..e22388d1 --- /dev/null +++ b/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -0,0 +1,2753 @@ +# Copyright 2025 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. + +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. + + +#### Requirer Charm + +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. 
+
+
+```python
+from charms.data_platform_libs.v1.data_interfaces import (
+    RequirerCommonModel,
+    RequirerDataContractV1,
+    ResourceCreatedEvent,
+    ResourceEntityCreatedEvent,
+    ResourceProviderModel,
+    ResourceRequirerEventHandler,
+)
+
+class ClientCharm(CharmBase):
+    # Database charm that accepts connections from application charms.
+    def __init__(self, *args) -> None:
+        super().__init__(*args)
+
+        requests = [
+            RequirerCommonModel(
+                resource="clientdb",
+            ),
+            RequirerCommonModel(
+                resource="clientbis",
+            ),
+            RequirerCommonModel(
+                entity_type="USER",
+            )
+        ]
+        self.database = ResourceRequirerEventHandler(
+            self,"database", requests, response_model=ResourceProviderModel
+        )
+        self.framework.observe(self.database.on.resource_created, self._on_resource_created)
+        self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created)
+
+    def _on_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Event triggered when a new database is created.
+        relation_id = event.relation.id
+        response = event.response # This is the response model
+
+        username = event.response.username
+        password = event.response.password
+        ...
+
+    def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None:
+        # Event triggered when a new entity is created.
+        ...
+
+Compared to V0, this library makes heavy use of pydantic models, and allows for
+multiple requests, specified as a list.
+On the Requirer side, each response will trigger one custom event for that response.
+This way, it allows for more strategic events to be emitted according to the request.
+
+As shown above, the library provides some custom events to handle specific situations, which are listed below:
+- resource_created: event emitted when the requested database is created.
+- resource_entity_created: event emitted when the requested entity is created.
+- endpoints_changed: event emitted when the read/write endpoints of the database have changed. 
+- read_only_endpoints_changed: event emitted when the read-only endpoints of the database + have changed. Event is not triggered if read/write endpoints changed too. + +If it is needed to connect multiple database clusters to the same relation endpoint +the application charm can implement the same code as if it would connect to only +one database cluster (like the above code example). + +To differentiate multiple clusters connected to the same relation endpoint +the application charm can use the name of the remote application: + +```python + +def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Get the remote app name of the cluster that triggered this event + cluster = event.relation.app.name +``` + +It is also possible to provide an alias for each different database cluster/relation. + +So, it is possible to differentiate the clusters in two ways. +The first is to use the remote application name, i.e., `event.relation.app.name`, as above. + +The second way is to use different event handlers to handle each cluster events. +The implementation would be something like the following code: + +```python + +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. + + def __init__(self, *args): + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + ] + # Define the cluster aliases and one handler for each cluster database created event. 
+        self.database = ResourceRequirerEventHandler(
+            self,
+            relation_name="database",
+            relations_aliases=["cluster1", "cluster2"],
+            requests=requests,
+        )
+        self.framework.observe(
+            self.database.on.cluster1_resource_created, self._on_cluster1_resource_created
+        )
+        self.framework.observe(
+            self.database.on.cluster2_resource_created, self._on_cluster2_resource_created
+        )
+
+    def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster1
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+
+    def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster2
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+```
+
+### Provider Charm
+
+Following an example of using the ResourceRequestedEvent, in the context of the
+database charm code:
+
+```python
+from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides
+
+class SampleCharm(CharmBase):
+
+    def __init__(self, *args):
+        super().__init__(*args)
+        # Charm events defined in the database provides charm library.
+        self.provided_database = DatabaseProvides(self, relation_name="database")
+        self.framework.observe(self.provided_database.on.database_requested,
+            self._on_database_requested)
+        # Database generic helper
+        self.database = DatabaseHelper()
+
+    def _on_database_requested(self, event: DatabaseRequestedEvent) -> None:
+        # Handle the event triggered by a new database requested in the relation
+        # Retrieve the database name using the charm library. 
+ db_name = event.database + # generate a new user credential + username = self.database.generate_user() + password = self.database.generate_password() + # set the credentials for the relation + self.provided_database.set_credentials(event.relation.id, username, password) + # set other variables for the relation event.set_tls("False") +``` + +As shown above, the library provides a custom event (database_requested) to handle +the situation when an application charm requests a new database to be created. +It's preferred to subscribe to this event instead of relation changed event to avoid +creating a new database when other information other than a database name is +exchanged in the relation databag. + +""" + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections import namedtuple +from datetime import datetime +from enum import Enum +from typing import ( + Annotated, + Any, + ClassVar, + Generic, + Literal, + NewType, + TypeAlias, + TypeVar, + final, + overload, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SecretStr, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) +from pydantic.types import _SecretBase, _SecretField +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeAliasType, override + +try: + import psycopg2 +except ImportError: + psycopg2 = None + +# The unique Charmhub library identifier, never change it +LIBID = 
"6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 0 + +PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + +RESOURCE_ALIASES = [ + "database", + "subject", + "topic", + "index", + "plugin-url", +] + +SECRET_PREFIX = "secret-" + + +############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + + +def gen_salt() -> str: + """Generates a consistent salt.""" + return "".join(random.choices(string.ascii_letters + string.digits, k=16)) + + +def gen_hash(resource_name: str, salt: str) -> str: + """Generates a consistent hash based on the resource name and salt.""" + hasher = hashlib.sha256() + hasher.update(f"{resource_name}:{salt}".encode()) + return 
hasher.hexdigest()[:16] + + +def ensure_leader_for_app(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self._local_app and not self._local_unit.is_leader(): + logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def get_encoded_dict( + relation: Relation, member: Unit | Application, field: str +) -> dict[str, Any] | None: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + logger.error("Unexpected datatype for %s instead of dict.", str(data)) + + +Diff = namedtuple("Diff", ["added", "changed", "deleted"]) +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. + +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + + +def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: + """Retrieves the diff of the data in the relation changed databag for v1. + + Args: + old_data: dictionary of the stored data before the event. + new_data: dictionary of the received data to compute the diff. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + old_data = old_data or {} + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + # Return the diff with all possible changes. 
+ return Diff(added, changed, deleted) + + +def resource_added(diff: Diff) -> bool: + """Ensures that one of the aliased resources has been added.""" + return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) + + +def store_new_data( + relation: Relation, + component: Unit | Application, + new_data: dict[str, str], + short_uuid: str | None = None, +): + """Stores the new data in the databag for diff computation.""" + # First, the case for V0 + if not short_uuid: + relation.data[component].update({"data": json.dumps(new_data)}) + # Then the case for V1, where we have a ShortUUID + else: + data = json.loads(relation.data[component].get("data", "{}")) + if not isinstance(data, dict): + raise ValueError + newest_data = copy.deepcopy(data) + newest_data[short_uuid] = new_data + relation.data[component].update({"data": json.dumps(newest_data)}) + + +############################################################################## +# Helper classes +############################################################################## + +SecretGroup = NewType("SecretGroup", str) + + +SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) + + +class SecretBool(_SecretField[bool]): + """Class for booleans as secrets.""" + + _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() + _error_kind: ClassVar[str] = "bool_type" + + def _display(self) -> str: + return "****" + + +OptionalSecretStr: TypeAlias = SecretStr | None +OptionalSecretBool: TypeAlias = SecretBool | None + +OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) + +UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] +TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] +TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] +MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] +ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] +EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] + + +class Scope(Enum): + """Peer relations scope.""" + + APP = "app" + UNIT = "unit" + + +class CachedSecret: + """Locally cache a secret. + + The data structure is precisely reusing/simulating as in the actual Secret Storage + """ + + KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + + def __init__( + self, + model: Model, + component: Application | Unit, + label: str, + secret_uri: str | None = None, + ): + self._secret_meta = None + self._secret_content = {} + self._secret_uri = secret_uri + self.label = label + self._model = model + self.component = component + self.current_label = None + + @property + def meta(self) -> Secret | None: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + logger.info(f"Secret with label {self.label} not found") + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + + ########################################################################## + # Public functions + ########################################################################## + + def add_secret( + self, + content: dict[str, str], + relation: Relation | None = None, + label: str | None = None, + ) -> Secret: + """Create a new secret.""" + if self._secret_uri: + raise SecretAlreadyExistsError( + "Secret is already defined with uri %s", self._secret_uri + ) + + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if 
relation and relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) + self._secret_uri = secret.id + self._secret_meta = secret + return self._secret_meta + + def get_content(self) -> dict[str, str]: + """Getting cached secret content.""" + if not self._secret_content: + if self.meta: + try: + self._secret_content = self.meta.get_content(refresh=True) + except (ValueError, ModelError) as err: + # https://bugs.launchpad.net/juju/+bug/2042596 + # Only triggered when 'refresh' is set + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + ): + raise + # Due to: ValueError: Secret owner cannot use refresh=True + self._secret_content = self.meta.get_content() + return self._secret_content + + def set_content(self, content: dict[str, str]) -> None: + """Setting cached secret content.""" + if not self.meta: + return + + if content == self.get_content(): + return + + if content: + self.meta.set_content(content) + self._secret_content = content + else: + self.meta.remove_all_revisions() + + def get_info(self) -> SecretInfo | None: + """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" + if self.meta: + return self.meta.get_info() + + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + + +class SecretCache: + """A data structure storing CachedSecret objects.""" + + def __init__(self, model: Model, component: Application | Unit): + self._model = model + self.component = component + self._secrets: dict[str, CachedSecret] = {} + + def get(self, label: str, uri: str | None = None) -> CachedSecret | None: + """Getting a secret from Juju Secret store or cache.""" + if 
not self._secrets.get(label): + secret = CachedSecret(self._model, self.component, label, uri) + if secret.meta: + self._secrets[label] = secret + return self._secrets.get(label) + + def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: + """Adding a secret to Juju Secret.""" + if self._secrets.get(label): + raise SecretAlreadyExistsError(f"Secret {label} already exists") + + secret = CachedSecret(self._model, self.component, label) + secret.add_secret(content, relation) + self._secrets[label] = secret + return self._secrets[label] + + def remove(self, label: str) -> None: + """Remove a secret from the cache.""" + if secret := self.get(label): + try: + secret.remove() + self._secrets.pop(label) + except (SecretsUnavailableError, KeyError): + pass + else: + return + logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) + + +############################################################################## +# Models classes +############################################################################## + + +class PeerModel(BaseModel): + """Common Model for all peer relations.""" + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + extra="allow", + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + 
aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + if not secret: + logger.info(f"No secret for group {secret_group}") + continue + + value = secret.get_content().get(aliased_field) + + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + elif value: + value = SecretStr(value) + setattr(self, field, value) + + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + value = getattr(self, field) + + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, + actual_value, + secret_group, + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + else: + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + return handler(self) + + 
+class CommonModel(BaseModel): + """Common Model for both requirer and provider. + + request_id stores the request identifier for easier access. + resource is the requested resource. + """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + extra="allow", + ) + + resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") + request_id: str | None = Field(default=None) + salt: str = Field( + description="This salt is used to create unique hashes even when other fields map 1-1", + default_factory=gen_salt, + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + + if not secret_uri: + continue + + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + if not secret: + logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") + continue + + value = secret.get_content().get(aliased_field) + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + elif value: + 
value = SecretStr(value) + + setattr(self, field, value) + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + _encountered_secrets: set[tuple[CachedSecret, str]] = set() + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + # Backward compatibility for v0 regarding secrets. + if info.context.get("version") == "v0": + short_uuid = None + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + value = getattr(self, field) + + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, actual_value, secret_group, short_uuid + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + setattr(self, secret_field, secret.meta.id) + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + 
_encountered_secrets.add((secret, secret_field)) + else: + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + + # Delete all empty secrets and clean up their fields. + for secret, secret_field in _encountered_secrets: + if not secret.get_content(): + # Setting a field to '' deletes it + setattr(self, secret_field, "") + repository.delete_secret(secret.label) + + return handler(self) + + @classmethod + def _get_secret_field(cls, field: str) -> SecretGroup | None: + """Checks if the field is a secret uri or not.""" + if not field.startswith(SECRET_PREFIX): + return None + + value = field.split("-")[1] + if info := cls.__pydantic_fields__.get(field.replace("-", "_")): + if info.annotation == SecretString: + return SecretGroup(value) + return None + + +class EntityPermissionModel(BaseModel): + """Entity Permissions Model.""" + + resource_name: str + resource_type: str + privileges: list + + +class RequirerCommonModel(CommonModel): + """Requirer side of the request model. + + extra_user_roles is used to request more roles for that user. + external_node_connectivity is used to indicate that the URI should be made for external clients when True + """ + + extra_user_roles: str | None = Field(default=None) + extra_group_roles: str | None = Field(default=None) + external_node_connectivity: bool = Field(default=False) + entity_type: Literal["USER", "GROUP"] | None = Field(default=None) + entity_permissions: list[EntityPermissionModel] | None = Field(default=None) + secret_mtls: SecretString | None = Field(default=None) + mtls_cert: MtlsSecretStr = Field(default=None) + + @model_validator(mode="after") + def validate_fields(self): + """Validates that no inconsistent request is being sent.""" + if self.entity_type and self.entity_type not in ["USER", "GROUP"]: + raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") + + if self.entity_type == "USER" and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == "GROUP" and self.extra_user_roles: + raise ValueError("Inconsistent entity information. Use extra_group_roles instead") + + return self + + +class ProviderCommonModel(CommonModel): + """Serialized fields added to the databag. + + endpoints stores the endpoints exposed to that client. + secret_user is a secret URI mapping to the user credentials + secret_tls is a secret URI mapping to the TLS certificate + secret_extra is a secret URI for all additional secrets requested. + """ + + endpoints: str | None = Field(default=None) + read_only_endpoints: str | None = Field(default=None) + secret_user: SecretString | None = Field(default=None) + secret_tls: SecretString | None = Field(default=None) + secret_extra: SecretString | None = Field(default=None) + secret_entity: SecretString | None = Field(default=None) + + +class ResourceProviderModel(ProviderCommonModel): + """Extended model including the deserialized fields.""" + + username: UserSecretStr = Field(default=None) + password: UserSecretStr = Field(default=None) + uris: UserSecretStr = Field(default=None) + read_only_uris: UserSecretStr = Field(default=None) + tls: TlsSecretBool = Field(default=None) + tls_ca: TlsSecretStr = Field(default=None) + entity_name: EntitySecretStr = Field(default=None) + entity_password: EntitySecretStr = Field(default=None) + version: str | None = Field(default=None) + + +class RequirerDataContractV0(RequirerCommonModel): + """Backward compatibility.""" + + version: Literal["v0"] = Field(default="v0") + + original_field: str = Field(exclude=True, default="") + + @model_validator(mode="before") + @classmethod + def ensure_original_field(cls, data: Any): + """Ensures that we keep the original field.""" + if isinstance(data, dict): + for alias in RESOURCE_ALIASES: + if data.get(alias) 
is not None:
+                    data["original_field"] = alias
+                    break
+            else:
+                aliases = [a for a in RESOURCE_ALIASES if getattr(data, a, None) is not None]
+                if aliases:
+                    data.original_field = aliases[0]
+        return data
+
+
+TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel)
+TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel)
+
+
+class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]):
+    """The new Data Contract."""
+
+    version: Literal["v1"] = Field(default="v1")
+    requests: list[TRequirerCommonModel]
+
+
+def discriminate_on_version(payload: Any) -> str:
+    """Use the version to discriminate."""
+    if isinstance(payload, dict):
+        return payload.get("version", "v0")
+    return getattr(payload, "version", "v0")
+
+
+RequirerDataContractType = Annotated[
+    Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")],
+    Discriminator(discriminate_on_version),
+]
+
+
+RequirerDataContract = TypeAdapter(RequirerDataContractType)
+
+
+class DataContractV0(ResourceProviderModel):
+    """The Data contract of the response, for V0."""
+
+
+class DataContractV1(BaseModel, Generic[TResourceProviderModel]):
+    """The Data contract of the response, for V1."""
+
+    version: Literal["v1"] = Field(default="v1")
+    requests: list[TResourceProviderModel] = Field(default_factory=list)
+
+
+DataContact = TypeAdapter(DataContractV1[ResourceProviderModel])
+
+
+TCommonModel = TypeVar("TCommonModel", bound=CommonModel)
+
+
+def is_topic_value_acceptable(value: str | None) -> str | None:
+    """Check whether the given Kafka topic value is acceptable."""
+    if value and "*" in value[:3]:
+        raise ValueError(f"Error on topic '{value}', unacceptable value.")
+    return value
+
+
+class KafkaRequestModel(RequirerCommonModel):
+    """Specialised model for Kafka."""
+
+    consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = (
+        Field(default=None)
+    )
+
+
+class KafkaResponseModel(ResourceProviderModel): 
+ """Kafka response model.""" + + consumer_group_prefix: ExtraSecretStr = Field(default=None) + zookeeper_uris: ExtraSecretStr = Field(default=None) + + +############################################################################## +# AbstractRepository class +############################################################################## + + +class AbstractRepository(ABC): + """Abstract repository interface.""" + + @abstractmethod + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + ... + + @abstractmethod + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def get_field(self, field: str) -> str | None: + """Gets the value for one field.""" + ... + + @abstractmethod + def get_fields(self, *fields: str) -> dict[str, str | None]: + """Gets the values for all provided fields.""" + ... + + @abstractmethod + def write_field(self, field: str, value: Any) -> None: + """Writes the value in the field, without any secret support.""" + ... + + @abstractmethod + def write_fields(self, mapping: dict[str, Any]) -> None: + """Writes the values of mapping in the fields without any secret support (keys of mapping).""" + ... + + def write_secret_field( + self, field: str, value: Any, group: SecretGroup + ) -> CachedSecret | None: + """Writes a secret field.""" + ... + + @abstractmethod + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def delete_secret(self, label: str): + """Deletes a secret by its label.""" + ... + + @abstractmethod + def delete_field(self, field: str) -> None: + """Deletes a field.""" + ... 
+ + @abstractmethod + def delete_fields(self, *fields: str) -> None: + """Deletes all the provided fields.""" + ... + + @abstractmethod + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + """Delete a field stored in a secret group.""" + ... + + @abstractmethod + def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: + """Registers a secret using the repository.""" + ... + + @abstractmethod + def get_data(self) -> dict[str, Any] | None: + """Gets the whole data.""" + ... + + @abstractmethod + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Builds a secret field.""" + + +class OpsRepository(AbstractRepository): + """Implementation for ops repositories, with some methods left out.""" + + SECRET_FIELD_NAME: str + + IGNORES_GROUPS: list[SecretGroup] = [] + + uri_to_databag: bool = True + + def __init__( + self, + model: Model, + relation: Relation | None, + component: Unit | Application, + ): + self._local_app = model.app + self._local_unit = model.unit + self.relation = relation + self.component = component + self.model = model + self.secrets = SecretCache(model, component) + + @abstractmethod + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group mapping for secrets within a relation context.""" + ... 
+
+    @override
+    def get_data(self) -> dict[str, Any] | None:
+        ret: dict[str, Any] = {}
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+
+        for key, value in self.relation.data[self.component].items():
+            try:
+                ret[key] = json.loads(value)
+            except json.JSONDecodeError:
+                ret[key] = value
+
+        return ret
+
+    @override
+    @ensure_leader_for_app
+    def get_field(
+        self,
+        field: str,
+    ) -> str | None:
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+        relation_data = self.relation.data[self.component]
+        return relation_data.get(field)
+
+    @override
+    @ensure_leader_for_app
+    def get_fields(self, *fields: str) -> dict[str, str]:
+        res = {}
+        for field in fields:
+            if (value := self.get_field(field)) is not None:
+                res[field] = value
+        return res
+
+    @override
+    @ensure_leader_for_app
+    def write_field(self, field: str, value: Any) -> None:
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+        if not value:
+            return None
+        self.relation.data[self.component].update({field: value})
+
+    @override
+    @ensure_leader_for_app
+    def write_fields(self, mapping: dict[str, Any]) -> None:
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+        for field, value in mapping.items(): self.write_field(field, value)
+
+    @override
+    @ensure_leader_for_app
+    def write_secret_field(
+        self, field: str, value: Any, 
secret_group: SecretGroup
+    ) -> CachedSecret | None:
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+
+        label = self._generate_secret_label(self.relation, secret_group)
+        secret_uri = self.get_field(self.secret_field(secret_group, field))
+
+        secret = self.secrets.get(label=label, uri=secret_uri)
+        if not secret:
+            return self.add_secret(field, value, secret_group)
+        else:
+            content = secret.get_content()
+            full_content = copy.deepcopy(content)
+            full_content.update({field: value})
+            secret.set_content(full_content)
+            return secret
+
+    @override
+    @ensure_leader_for_app
+    def delete_field(self, field: str) -> None:
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+        relation_data = self.relation.data[self.component]
+        try:
+            relation_data.pop(field)
+        except KeyError:
+            logger.debug(
+                f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})"
+            )
+
+    @override
+    @ensure_leader_for_app
+    def delete_fields(self, *fields: str) -> None:
+        for field in fields: self.delete_field(field)
+
+    @override
+    @ensure_leader_for_app
+    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
+        if not self.relation:
+            logger.info("No relation to get value from")
+            return None
+        if self.component not in self.relation.data:
+            logger.info(f"Component {self.component} not in relation {self.relation}")
+            return None
+
+        relation_data = self.relation.data[self.component]
+        secret_field = self.secret_field(secret_group, field)
+
+        label = self._generate_secret_label(self.relation, secret_group)
+        secret_uri = relation_data.get(secret_field)
+
+        secret = 
self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f"from relation {self.relation.id} and group {secret_group}" + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. 
+ """ + if not self.relation: + raise ValueError("Cannot register without relation.") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> Any | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + @override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + return secret + + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + + +@final +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = "secret" + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" + return f"{relation.name}.{relation.id}.{secret_group}.secret" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f"{self.SECRET_FIELD_NAME}-{secret_group}" + + +class 
OpsPeerRepository(OpsRepository): + """Implementation of the Ops Repository for peer relations.""" + + SECRET_FIELD_NAME = "internal_secret" + + IGNORES_GROUPS = [ + SecretGroup("user"), + SecretGroup("entity"), + SecretGroup("mtls"), + SecretGroup("tls"), + ] + + uri_to_databag: bool = False + + @property + def scope(self) -> Scope: + """Returns a scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + raise ValueError("Invalid component, neither a Unit nor an Application") + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + members = [relation.name, self._local_app.name, self.scope.value] + + if secret_group != SecretGroup("extra"): + members.append(secret_group) + return f"{'.'.join(members)}" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + if not field: + raise ValueError("Must have a field.") + return f"{field}@{secret_group}" + + +@final +class OpsPeerUnitRepository(OpsPeerRepository): + """Implementation for a unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + super().__init__(model, relation, component) + + +@final +class OpsOtherPeerUnitRepository(OpsPeerRepository): + """Implementation for a remote unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + if component == model.unit: + raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.") + super().__init__(model, relation, component) + + @override + def write_field(self, field: str, value: Any) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def write_fields(self, mapping: 
dict[str, Any]) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def add_secret( + self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None + ) -> CachedSecret | None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_field(self, field: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_fields(self, *fields: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + +TRepository = TypeVar("TRepository", bound=OpsRepository) +TCommon = TypeVar("TCommon", bound=BaseModel) +TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel) +TCommonBis = TypeVar("TCommonBis", bound=BaseModel) + + +class RepositoryInterface(Generic[TRepository, TCommon]): + """Repository builder.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + component: Unit | Application, + repository_type: type[TRepository], + model: type[TCommon] | TypeAdapter | None, + ): + self.charm = charm + self._model = charm.model + self.repository_type = repository_type + self.relation_name = relation_name + self.model = model + self.component = component + + @property + def relations(self) -> list[Relation]: + """The list of Relation instances associated with this relation name.""" + return self._model.relations[self.relation_name] + + def repository( + self, relation_id: int, component: Unit | Application | None = None + ) -> TRepository: + """Returns a repository for the relation.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return self.repository_type(self._model, relation, 
component or self.component) + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommon], + component: Unit | Application | None = None, + ) -> TCommon: ... + + @overload + def build_model( + self, + relation_id: int, + model: TypeAdapter[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: None = None, + component: Unit | Application | None = None, + ) -> TCommon: ... + + def build_model( + self, + relation_id: int, + model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, + component: Unit | Application | None = None, + ) -> TCommon | TCommonBis: + """Builds a model using the repository for that relation.""" + model = model or self.model # First the provided model (allows for specialisation) + component = component or self.component + if not model: + raise ValueError("Missing model to specialise data") + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return build_model(self.repository_type(self._model, relation, component), model) + + def write_model( + self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None + ): + """Writes the model using the repository.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + + write_model( + self.repository_type(self._model, relation, self.component), model, context=context + ) + + +class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TCommon] | TypeAdapter | None = 
None, + ): + super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) + + +class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) + + +class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): + """Specialised Interface to build repositories for this unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) + + +class OpsOtherPeerUnitRepositoryInterface( + RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] +): + """Specialised Interface to build repositories for another unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + unit: Unit, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) + + +############################################################################## +# DDD implementation methods +############################################################################## +############################################################################## + + +def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: + """Builds a common model using the provided repository and provided model structure.""" + data = repository.get_data() or {} + + data.pop("data", None) + + # Beware this means all fields should have a default value here. 
+ if isinstance(model, TypeAdapter): + return model.validate_python(data, context={"repository": repository}) + + return model.model_validate(data, context={"repository": repository}) + + +def write_model( + repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None +): + """Writes the data stored in the model using the repository object.""" + context = context or {} + dumped = model.model_dump( + mode="json", context={"repository": repository} | context, exclude_none=False + ) + for field, value in dumped.items(): + if value is None: + repository.delete_field(field) + continue + dumped_value = value if isinstance(value, str) else json.dumps(value) + repository.write_field(field, dumped_value) + + +############################################################################## +# Custom Events +############################################################################## + + +class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.request = request + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["request"] = pickle.dumps(self.request) + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.request = pickle.loads(snapshot["request"]) + + +class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource requested event.""" + + pass + + +class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource Entity requested event.""" + + pass + + +class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + pass + + +class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + old_mtls_cert: str | None = None, + ): + super().__init__(handle, relation, app, unit, request) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. 
+ + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + requests: list[TRequirerCommonModel], + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.requests = requests + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. + snapshot["requests"] = [pickle.dumps(request) for request in self.requests] + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.requests = [pickle.loads(request) for request in snapshot["requests"]] + + +class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["response"] = pickle.dumps(self.response) + return snapshot + + def restore(self, snapshot: dict): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + + self.response = pickle.loads(snapshot["response"]) + + +class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource has been created.""" + + pass + + +class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource entity has been created.""" + + pass + + +class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read/Write enpoints are changed.""" + + pass + + +class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read-only enpoints are changed.""" + + pass + + +class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Authentication was updated for a user.""" + + pass + + +class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _handle_event( + self, + ): + """Handles 
the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, "data") + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode="json", + exclude={"data"}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data(relation, self.component, new_data, short_uuid=request.request_id) + + return _diff + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = "", + mtls_enabled: bool = False, + bulk_event: bool = False, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. + bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in _diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): + if self.mtls_enabled and "secret-mtls" in _diff.added: + getattr(self.on, "mtls_cert_updated").emit( + event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None + ) + return + # Emit a resource requested event if the setup key (resource name) + # was added to the relation databag, but the entity-type key was not. + if resource_added(_diff) and "entity-type" not in _diff.added: + getattr(self.on, "resource_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (resource name) + # was added to the relation databag, in addition to the entity-type key. + if resource_added(_diff) and "entity-type" in _diff.added: + getattr(self.on, "resource_entity_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (resource name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + not resource_added(_diff) + and "entity-type" not in _diff.added + and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) + ): + getattr(self.on, "resource_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit, request=request + ) + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request: RequirerCommonModel, + ): + _diff = self.compute_diff(event.relation, request, repository) + + self._validate_diff(event, _diff) + self._dispatch_events(event, _diff, request) + + def _handle_bulk_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request_model: RequirerDataContractV1[TRequirerCommonModel], + ): + """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. + + This allows for the developer to process the diff and store it themselves + """ + for request in request_model.requests: + # Compute the diff without storing it so we can validate the diffs. + _diff = self.compute_diff(event.relation, request, repository, store=False) + self._validate_diff(event, _diff) + + getattr(self.on, "bulk_resources_requested").emit( + event.relation, app=event.app, unit=event.unit, requests=request_model.requests + ) + + # Store all the diffs if they were not already stored. 
+ for request in request_model.requests: + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info("MTLS is disabled, exiting early.") + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + logger.info("mtls-cert-updated") + + # V0, just fire the event. + if version == "v0": + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. 
+ else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if not short_uuid: + return + for _request in request_model.requests: + if _request.request_id == short_uuid: + request = _request + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "mtls_cert_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + request=request, + mtls_cert=old_mtls_cert, + ) + + @override + def _on_relation_changed_event(self, event: RelationChangedEvent): + if not self.charm.unit.is_leader(): + return + + repository = OpsRelationRepository( + self.model, event.relation, component=event.relation.app + ) + + # Don't do anything until we get some data + if not repository.get_data(): + return + + version = repository.get_field("version") or "v0" + if version == "v0": + request_model = build_model(repository, RequirerDataContractV0) + old_name = request_model.original_field + request_model.request_id = None # For safety, let's ensure that we don't have a model. + self._handle_event(event, repository, request_model) + logger.info( + f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" + ) + self.interface.repository( + event.relation.id, + ).write_field(old_name, request_model.resource) + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if self.bulk_event: + self._handle_bulk_event(event, repository, request_model) + return + for request in request_model.requests: + self._handle_event(event, repository, request) + + def set_response(self, relation_id: int, response: ResourceProviderModel): + r"""Sets a response in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. + If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! 
+ + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. + for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index] = response + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = "", + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + ) + + if 
requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" + ) + + # Created custom event names for each alias. + if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f"{relation_alias}_resource_created", + ResourceCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_resource_entity_created", + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). 
+ """ + if not model: + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + for request in model.requests: + if request.request_id == request_id: + return request.secret_user is not None or request.secret_entity is not None + return False + + def are_all_resources_created(self, rel_id: int) -> bool: + """Checks that all resources have been created for a relation. + + Args: + rel_id: The relation id to check. + """ + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + return all( + self.is_resource_created(rel_id, request.request_id, model) + for request in model.requests + if request.request_id + ) + + @staticmethod + def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: + # Actual checking method. + # No need to check for psycopg here, it's been checked before. + if not psycopg2: + return False + + try: + with psycopg2.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg2.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", + str(e), + ) + return False + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: Optional index to check the database (default: 0 - first relation). + """ + if not psycopg2: + return False + + # Can't check a non existing relation. 
+        if len(self.relations) <= relation_index:
+            return False
+
+        relation = self.relations[relation_index]
+        model = self.interface.build_model(relation_id=relation.id, component=relation.app)
+        # Use the first request that already exposes endpoints and credentials.
+        for request in model.requests:
+            if request.endpoints and request.username and request.password:
+                # endpoints is "host:port[,...]"; only the first host is used here.
+                host = request.endpoints.split(":")[0]
+                username = request.username.get_secret_value()
+                password = request.password.get_secret_value()
+
+                connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'"
+                return self._is_pg_plugin_enabled(plugin, connection_string)
+        logger.info("No valid request to use to check for plugin.")
+        return False
+
+    ##############################################################################
+    # Helpers for aliases
+    ##############################################################################
+
+    def _assign_relation_alias(self, relation_id: int) -> None:
+        """Assigns an alias to a relation.
+
+        This function writes in the unit data bag.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+        """
+        # If no aliases were provided, return immediately.
+        if not self.relation_aliases:
+            return
+
+        # Return if an alias was already assigned to this relation
+        # (like when there are more than one unit joining the relation).
+        relation = self.charm.model.get_relation(self.relation_name, relation_id)
+        if relation and relation.data[self.charm.unit].get("alias"):
+            return
+
+        # Retrieve the available aliases (the ones that weren't assigned to any relation).
+        available_aliases = self.relation_aliases[:]
+        for relation in self.charm.model.relations[self.relation_name]:
+            alias = relation.data[self.charm.unit].get("alias")
+            if alias:
+                logger.debug("Alias %s was already assigned to relation %d", alias, relation.id)
+                # NOTE(review): remove() raises ValueError if the stored alias is
+                # no longer in relation_aliases (e.g. config changed) -- confirm
+                # this cannot happen, or guard the removal.
+                available_aliases.remove(alias)
+
+        # Set the alias in the unit relation databag of the specific relation.
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({"alias": available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get("alias") + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = 
unit + break + + response_model = self.interface.build_model(relation.id) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "authentication_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. + for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version="v1", requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info("Still waiting for data.") + 
return + + data = repository.get_field("data") + if not data: + logger.info("Missing data to compute diffs") + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f"No request matching the response with response_id {response_id}" + ) + self._handle_event(event, repository, request, response) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace("-", "_")) + repository.register_secret(uri, secret_group, response.request_id) + + if "secret-user" in _diff.added and not request.entity_type: + logger.info(f"resource {response.resource} created at {datetime.now()}") + getattr(self.on, "resource_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_created", response) + return + + if "secret-entity" in _diff.added and request.entity_type: + logger.info(f"entity {response.entity_name} created at {datetime.now()}") + getattr(self.on, "resource_entity_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_entity_created", response) + return + + if "endpoints" in _diff.added or "endpoints" in _diff.changed: + logger.info(f"endpoints 
changed at {datetime.now()}") + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "endpoints_changed", response) + return + + if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: + logger.info(f"read-only-endpoints changed at {datetime.now()}") + getattr(self.on, "read_only_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "read_only_endpoints_changed", response) + return diff --git a/tests/v1/integration/dummy-database-charm/src/charm.py b/tests/v1/integration/dummy-database-charm/src/charm.py index c10c0b03..05c70362 100755 --- a/tests/v1/integration/dummy-database-charm/src/charm.py +++ b/tests/v1/integration/dummy-database-charm/src/charm.py @@ -18,9 +18,11 @@ from ops.main import main from ops.model import ActiveStatus from pydantic import Field +from pydantic.types import _SecretBase from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, + ExtraSecretStr, OpsOtherPeerUnitRepository, OpsPeerRepositoryInterface, OpsPeerUnitRepositoryInterface, @@ -42,10 +44,16 @@ class PeerAppModel(PeerModel): field: MygroupSecretStr = Field(default=None) + not_a_secret: str | None = Field(default=None) + new_field: ExtraSecretStr = Field(default=None) + mygroup_field1: MygroupSecretStr = Field(default=None) + mygroup_field2: MygroupSecretStr = Field(default=None) class ExtendedResourceProviderModel(ResourceProviderModel): field: MygroupSecretStr = Field(default=None) + not_a_secret: str | None = Field(default=None) + new_field: ExtraSecretStr = Field(default=None) ExtendedDataContractV1 = DataContractV1[ExtendedResourceProviderModel] @@ -161,6 +169,7 @@ def _on_get_peer_secret(self, event: ActionEvent): repository = self.peer_relation_unit.repository(relation_bis.id) result = repository.get_secret_field(event.params["field"], event.params["group"]) 
+ result = result.get_secret_value() if issubclass(result.__class__, _SecretBase) else result event.set_results({event.params["field"]: result if result else ""}) def _on_set_peer_secret(self, event: ActionEvent): @@ -180,7 +189,9 @@ def _on_set_peer_secret(self, event: ActionEvent): relation_bis = self.peer_relation_unit.relations[0] repository = self.peer_relation_unit.repository(relation_bis.id) repository.write_secret_field( - event.params["field"], event.params["value"], event.params["group"] + event.params["field"], + event.params["value"], + event.params["group"] or SecretGroup("extra"), ) # Remove peer secrets @@ -206,16 +217,15 @@ def _on_get_peer_relation_field(self, event: ActionEvent): value = None if component == "app": - relation_bis = self.peer_relation_app.relations[0] - repository = self.peer_relation_app.repository(relation_bis.id) - value_new = repository.get_field(event.params["field"]) + relation = self.peer_relation_app.relations[0] + model = self.peer_relation_app.build_model(relation.id) + value = getattr(model, event.params["field"].replace("-", "_")) else: - relation_bis = self.peer_relation_unit.relations[0] - repository = self.peer_relation_unit.repository(relation_bis.id) - value_new = repository.get_field(event.params["field"]) - event.set_results( - {"value": value if value else "", "value-new": value_new if value_new else ""} - ) + relation = self.peer_relation_unit.relations[0] + model = self.peer_relation_unit.build_model(relation.id) + value = getattr(model, event.params["field"].replace("-", "_")) + value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + event.set_results({"value": value if value else ""}) if __name__ == "__main__": diff --git a/tests/v1/integration/kafka-charm/charmcraft.yaml b/tests/v1/integration/kafka-charm/charmcraft.yaml index cf133daf..1ad20055 100644 --- a/tests/v1/integration/kafka-charm/charmcraft.yaml +++ b/tests/v1/integration/kafka-charm/charmcraft.yaml @@ -2,12 
+2,11 @@ # See LICENSE file for licensing details. type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -59,14 +58,7 @@ parts: - libssl-dev # Needed to build Python dependencies with Rust from source - pkg-config # Needed to build Python dependencies with Rust from source override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu # archive—which means the rustup version could be updated at any time. Print rustup version diff --git a/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py new file mode 100644 index 00000000..e22388d1 --- /dev/null +++ b/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -0,0 +1,2753 @@ +# Copyright 2025 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. + +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. 
+Compared to V0, this library makes heavy use of pydantic models, and allows for
+multiple requests, specified as a list.
+As shown above, the library provides some custom events to handle specific situations, which are listed below:
+        self.database = ResourceRequirerEventHandler(
+            self,
+            relation_name="database",
+            relation_aliases=["cluster1", "cluster2"],
+            requests=requests,
+        )
+        # set other variables for the relation
+        event.set_tls("False")
+ +""" + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections import namedtuple +from datetime import datetime +from enum import Enum +from typing import ( + Annotated, + Any, + ClassVar, + Generic, + Literal, + NewType, + TypeAlias, + TypeVar, + final, + overload, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SecretStr, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) +from pydantic.types import _SecretBase, _SecretField +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeAliasType, override + +try: + import psycopg2 +except ImportError: + psycopg2 = None + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 0 + +PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + +RESOURCE_ALIASES = [ + "database", + "subject", + "topic", + "index", + "plugin-url", +] + +SECRET_PREFIX = "secret-" + + 
+############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + + +def gen_salt() -> str: + """Generates a consistent salt.""" + return "".join(random.choices(string.ascii_letters + string.digits, k=16)) + + +def gen_hash(resource_name: str, salt: str) -> str: + """Generates a consistent hash based on the resource name and salt.""" + hasher = hashlib.sha256() + hasher.update(f"{resource_name}:{salt}".encode()) + return hasher.hexdigest()[:16] + + +def ensure_leader_for_app(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self._local_app and not self._local_unit.is_leader(): + logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def get_encoded_dict( + relation: Relation, member: Unit | Application, field: str +) -> dict[str, Any] | None: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + 
logger.error("Unexpected datatype for %s instead of dict.", str(data))
+
+
+Diff = namedtuple("Diff", ["added", "changed", "deleted"])
+Diff.__doc__ = """
+A tuple for storing the diff between two data mappings.
+
+added - keys that were added
+changed - keys that still exist but have new values
+deleted - keys that were deleted"""
+
+
+def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff:
+    """Retrieves the diff of the data in the relation changed databag for v1.
+
+    Args:
+        old_data: dictionary of the stored data before the event.
+        new_data: dictionary of the received data to compute the diff.
+
+    Returns:
+        a Diff instance containing the added, deleted and changed
+        keys from the event relation databag.
+    """
+    # Treat "no stored data" the same as an empty mapping.
+    old_data = old_data or {}
+
+    # These are the keys that were added to the databag and triggered this event.
+    # (dict key-view subtraction yields a plain set.)
+    added = new_data.keys() - old_data.keys()
+    # These are the keys that were removed from the databag and triggered this event.
+    deleted = old_data.keys() - new_data.keys()
+    # These are the keys that already existed in the databag,
+    # but had their values changed.
+    changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]}
+    # Return the diff with all possible changes.
+ return Diff(added, changed, deleted) + + +def resource_added(diff: Diff) -> bool: + """Ensures that one of the aliased resources has been added.""" + return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) + + +def store_new_data( + relation: Relation, + component: Unit | Application, + new_data: dict[str, str], + short_uuid: str | None = None, +): + """Stores the new data in the databag for diff computation.""" + # First, the case for V0 + if not short_uuid: + relation.data[component].update({"data": json.dumps(new_data)}) + # Then the case for V1, where we have a ShortUUID + else: + data = json.loads(relation.data[component].get("data", "{}")) + if not isinstance(data, dict): + raise ValueError + newest_data = copy.deepcopy(data) + newest_data[short_uuid] = new_data + relation.data[component].update({"data": json.dumps(newest_data)}) + + +############################################################################## +# Helper classes +############################################################################## + +SecretGroup = NewType("SecretGroup", str) + + +SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) + + +class SecretBool(_SecretField[bool]): + """Class for booleans as secrets.""" + + _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() + _error_kind: ClassVar[str] = "bool_type" + + def _display(self) -> str: + return "****" + + +OptionalSecretStr: TypeAlias = SecretStr | None +OptionalSecretBool: TypeAlias = SecretBool | None + +OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) + +UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] +TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] +TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] +MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] +ExtraSecretStr = Annotated[OptionalSecretStr, 
ExtraSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "extra"]
EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"]


class Scope(Enum):
    """Peer relations scope."""

    APP = "app"
    UNIT = "unit"


class CachedSecret:
    """Locally cache a secret.

    The data structure is precisely reusing/simulating as in the actual Secret Storage
    """

    # Juju model errors that are expected and safe to swallow when refreshing
    # secret content (see get_content below).
    KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]]

    def __init__(
        self,
        model: Model,
        component: Application | Unit,
        label: str,
        secret_uri: str | None = None,
    ):
        # Lazily resolved Secret object and its cached content.
        self._secret_meta = None
        self._secret_content = {}
        self._secret_uri = secret_uri
        self.label = label
        self._model = model
        self.component = component
        self.current_label = None

    @property
    def meta(self) -> Secret | None:
        """Getting cached secret meta-information."""
        if not self._secret_meta:
            # Nothing to look the secret up by.
            if not (self._secret_uri or self.label):
                return

            try:
                self._secret_meta = self._model.get_secret(label=self.label)
            except SecretNotFoundError:
                # Falling back to seeking for potential legacy labels
                logger.info(f"Secret with label {self.label} not found")

            # If still not found, to be checked by URI, to be labelled with the proposed label
            if not self._secret_meta and self._secret_uri:
                self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label)
        return self._secret_meta

    ##########################################################################
    # Public functions
    ##########################################################################

    def add_secret(
        self,
        content: dict[str, str],
        relation: Relation | None = None,
        label: str | None = None,
    ) -> Secret:
        """Create a new secret.

        Raises:
            SecretAlreadyExistsError: if this cache entry already points at a secret.
        """
        if self._secret_uri:
            raise SecretAlreadyExistsError(
                "Secret is already defined with uri %s", self._secret_uri
            )

        label = self.label if not label else label

        secret = self.component.add_secret(content, label=label)
        # If it's not a peer relation, grant is to be applied
        if relation and relation.app != self._model.app:
            secret.grant(relation)
        self._secret_uri = secret.id
        self._secret_meta = secret
        return self._secret_meta

    def get_content(self) -> dict[str, str]:
        """Getting cached secret content."""
        if not self._secret_content:
            if self.meta:
                try:
                    self._secret_content = self.meta.get_content(refresh=True)
                except (ValueError, ModelError) as err:
                    # https://bugs.launchpad.net/juju/+bug/2042596
                    # Only triggered when 'refresh' is set
                    if isinstance(err, ModelError) and not any(
                        msg in str(err) for msg in self.KNOWN_MODEL_ERRORS
                    ):
                        raise
                    # Due to: ValueError: Secret owner cannot use refresh=True
                    self._secret_content = self.meta.get_content()
        return self._secret_content

    def set_content(self, content: dict[str, str]) -> None:
        """Setting cached secret content."""
        if not self.meta:
            return

        # Avoid creating a new revision when nothing changed.
        if content == self.get_content():
            return

        if content:
            self.meta.set_content(content)
            self._secret_content = content
        else:
            # Empty content means the secret is no longer needed.
            self.meta.remove_all_revisions()

    def get_info(self) -> SecretInfo | None:
        """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any."""
        if self.meta:
            return self.meta.get_info()

    def remove(self) -> None:
        """Remove secret.

        Raises:
            SecretsUnavailableError: if there is no underlying secret to remove.
        """
        if not self.meta:
            raise SecretsUnavailableError("Non-existent secret was attempted to be removed.")
        try:
            self.meta.remove_all_revisions()
        except SecretNotFoundError:
            # Already gone in Juju; resetting local state below is enough.
            pass
        self._secret_content = {}
        self._secret_meta = None
        self._secret_uri = None
class SecretCache:
    """A data structure storing CachedSecret objects, keyed by label."""

    def __init__(self, model: Model, component: Application | Unit):
        self._model = model
        self.component = component
        self._secrets: dict[str, CachedSecret] = {}

    def get(self, label: str, uri: str | None = None) -> CachedSecret | None:
        """Getting a secret from Juju Secret store or cache.

        Only caches secrets that actually resolve (have meta) in Juju.
        """
        if not self._secrets.get(label):
            secret = CachedSecret(self._model, self.component, label, uri)
            if secret.meta:
                self._secrets[label] = secret
        return self._secrets.get(label)

    def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret:
        """Adding a secret to Juju Secret.

        Raises:
            SecretAlreadyExistsError: if a secret with this label is cached already.
        """
        if self._secrets.get(label):
            raise SecretAlreadyExistsError(f"Secret {label} already exists")

        secret = CachedSecret(self._model, self.component, label)
        secret.add_secret(content, relation)
        self._secrets[label] = secret
        return self._secrets[label]

    def remove(self, label: str) -> None:
        """Remove a secret from the cache."""
        if secret := self.get(label):
            try:
                secret.remove()
                self._secrets.pop(label)
            except (SecretsUnavailableError, KeyError):
                pass
            else:
                return
        # Fix: use the module-level logger (was the root `logging` module),
        # consistent with every other log call in this file.
        logger.debug("Non-existing Juju Secret was attempted to be removed %s", label)


##############################################################################
# Models classes
##############################################################################


class PeerModel(BaseModel):
    """Common Model for all peer relations.

    Secret-backed fields (annotated with a secret group) are resolved from /
    written to Juju secrets whenever a repository is supplied in the
    validation/serialization context.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        # Secrets can only be resolved when a repository is provided in the
        # validation context; a plain model_validate() skips this step.
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        for field, field_info in self.__pydantic_fields__.items():
            # Secret-backed fields carry exactly one metadata entry: the group name.
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                # Peer secrets are looked up by label only (no URI in the databag).
                secret = repository.get_secret(secret_group, secret_uri=None)

                if not secret:
                    logger.info(f"No secret for group {secret_group}")
                    continue

                value = secret.get_content().get(aliased_field)

                # Booleans are JSON-encoded inside the secret content.
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)
                setattr(self, field, value)

        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                value = getattr(self, field)

                # Unwrap Secret* wrappers; non-string payloads are JSON-encoded.
                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    # First write for this group: create the secret on demand.
                    if value:
                        secret = repository.add_secret(
                            aliased_field,
                            actual_value,
                            secret_group,
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    # Field unset: remove it from the secret content.
                    full_content.pop(aliased_field, None)
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)
        return handler(self)
class CommonModel(BaseModel):
    """Common Model for both requirer and provider.

    request_id stores the request identifier for easier access.
    resource is the requested resource.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    # "resource" also accepts the legacy v0 aliases (see RESOURCE_ALIASES).
    resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="")
    request_id: str | None = Field(default=None)
    salt: str = Field(
        description="This salt is used to create unique hashes even when other fields map 1-1",
        default_factory=gen_salt,
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        # Secrets can only be resolved when a repository is supplied in the
        # validation context; plain model_validate() skips this step.
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        # The request identifier scopes secret labels per request.
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        for field, field_info in self.__pydantic_fields__.items():
            # Secret-backed fields carry exactly one metadata entry: the group name.
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                # Databag field carrying the secret URI, e.g. "secret-user".
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)

                if not secret_uri:
                    continue

                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                if not secret:
                    logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}")
                    continue

                value = secret.get_content().get(aliased_field)
                # Booleans are JSON-encoded inside the secret content.
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)

                setattr(self, field, value)
        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        # Secrets whose content may have become empty; cleaned up at the end.
        _encountered_secrets: set[tuple[CachedSecret, str]] = set()
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        # Backward compatibility for v0 regarding secrets.
        if info.context.get("version") == "v0":
            short_uuid = None

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)
                aliased_field = field_info.serialization_alias or field
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)
                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                value = getattr(self, field)

                # Unwrap Secret* wrappers; non-string payloads are JSON-encoded.
                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    # First write for this group: create the secret and store
                    # its URI in the corresponding databag field.
                    if value:
                        secret = repository.add_secret(
                            aliased_field, actual_value, secret_group, short_uuid
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                        setattr(self, secret_field, secret.meta.id)
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    # Field unset: drop it, and remember the secret so it can
                    # be deleted below if its content became empty.
                    full_content.pop(aliased_field, None)
                    _encountered_secrets.add((secret, secret_field))
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)

        # Delete all empty secrets and clean up their fields.
        for secret, secret_field in _encountered_secrets:
            if not secret.get_content():
                # Setting a field to '' deletes it
                setattr(self, secret_field, "")
                repository.delete_secret(secret.label)

        return handler(self)

    @classmethod
    def _get_secret_field(cls, field: str) -> SecretGroup | None:
        """Checks if the field is a secret uri or not."""
        if not field.startswith(SECRET_PREFIX):
            return None

        # Field shape is "secret-<group>"; take the group part.
        value = field.split("-")[1]
        if info := cls.__pydantic_fields__.get(field.replace("-", "_")):
            # NOTE(review): secret-uri fields are declared as `SecretString | None`,
            # so `info.annotation == SecretString` may never match the union type —
            # confirm against pydantic's annotation storage.
            if info.annotation == SecretString:
                return SecretGroup(value)
        return None


class EntityPermissionModel(BaseModel):
    """Entity Permissions Model."""

    resource_name: str
    resource_type: str
    privileges: list


class RequirerCommonModel(CommonModel):
    """Requirer side of the request model.

    extra_user_roles is used to request more roles for that user.
    external_node_connectivity is used to indicate that the URI should be made for external clients when True
    """

    extra_user_roles: str | None = Field(default=None)
    extra_group_roles: str | None = Field(default=None)
    external_node_connectivity: bool = Field(default=False)
    entity_type: Literal["USER", "GROUP"] | None = Field(default=None)
    entity_permissions: list[EntityPermissionModel] | None = Field(default=None)
    secret_mtls: SecretString | None = Field(default=None)
    mtls_cert: MtlsSecretStr = Field(default=None)

    @model_validator(mode="after")
    def validate_fields(self):
        """Validates that no inconsistent request is being sent.

        Raises:
            ValueError: when entity_type and extra_*_roles disagree.
        """
        # Defensive re-check; the Literal annotation already enforces this.
        if self.entity_type and self.entity_type not in ["USER", "GROUP"]:
            raise ValueError("Invalid entity-type. Possible values are USER and GROUP")

        if self.entity_type == "USER" and self.extra_group_roles:
            raise ValueError("Inconsistent entity information. Use extra_user_roles instead")

        if self.entity_type == "GROUP" and self.extra_user_roles:
            raise ValueError("Inconsistent entity information. Use extra_group_roles instead")

        return self
class ProviderCommonModel(CommonModel):
    """Serialized fields added to the databag.

    endpoints stores the endpoints exposed to that client.
    secret_user is a secret URI mapping to the user credentials
    secret_tls is a secret URI mapping to the TLS certificate
    secret_extra is a secret URI for all additional secrets requested.
    """

    endpoints: str | None = Field(default=None)
    read_only_endpoints: str | None = Field(default=None)
    secret_user: SecretString | None = Field(default=None)
    secret_tls: SecretString | None = Field(default=None)
    secret_extra: SecretString | None = Field(default=None)
    secret_entity: SecretString | None = Field(default=None)


class ResourceProviderModel(ProviderCommonModel):
    """Extended model including the deserialized fields."""

    username: UserSecretStr = Field(default=None)
    password: UserSecretStr = Field(default=None)
    uris: UserSecretStr = Field(default=None)
    read_only_uris: UserSecretStr = Field(default=None)
    tls: TlsSecretBool = Field(default=None)
    tls_ca: TlsSecretStr = Field(default=None)
    entity_name: EntitySecretStr = Field(default=None)
    entity_password: EntitySecretStr = Field(default=None)
    version: str | None = Field(default=None)


class RequirerDataContractV0(RequirerCommonModel):
    """Backward compatibility."""

    version: Literal["v0"] = Field(default="v0")

    # Which v0 alias ("database", "topic", ...) carried the resource name,
    # kept so responses can be written back under the same key.
    original_field: str = Field(exclude=True, default="")

    @model_validator(mode="before")
    @classmethod
    def ensure_original_field(cls, data: Any):
        """Ensures that we keep the original field."""
        if isinstance(data, dict):
            for alias in RESOURCE_ALIASES:
                if data.get(alias) is not None:
                    data["original_field"] = alias
                    break
        else:
            for alias in RESOURCE_ALIASES:
                # Fix: getattr with a default, so models lacking the alias
                # attribute don't raise AttributeError.
                if getattr(data, alias, None) is not None:
                    data.original_field = alias
                    # Fix: stop at the first matching alias, mirroring the
                    # dict branch above (previously the *last* alias won).
                    break
        return data


TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel)
TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel)


class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]):
    """The new Data Contract."""

    version: Literal["v1"] = Field(default="v1")
    requests: list[TRequirerCommonModel]


def discriminate_on_version(payload: Any) -> str:
    """Use the version to discriminate."""
    if isinstance(payload, dict):
        return payload.get("version", "v0")
    return getattr(payload, "version", "v0")


RequirerDataContractType = Annotated[
    Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")],
    Discriminator(discriminate_on_version),
]


RequirerDataContract = TypeAdapter(RequirerDataContractType)


class DataContractV0(ResourceProviderModel):
    """The Data contract of the response, for V0."""


class DataContractV1(BaseModel, Generic[TResourceProviderModel]):
    """The Data contract of the response, for V1."""

    version: Literal["v1"] = Field(default="v1")
    requests: list[TResourceProviderModel] = Field(default_factory=list)


# NOTE(review): name looks like a typo for "DataContract"; kept as-is since it
# is part of the module's public interface.
DataContact = TypeAdapter(DataContractV1[ResourceProviderModel])


TCommonModel = TypeVar("TCommonModel", bound=CommonModel)


def is_topic_value_acceptable(value: str | None) -> str | None:
    """Check whether the given Kafka topic value is acceptable.

    Raises:
        ValueError: if a wildcard appears in the first three characters.
    """
    if value and "*" in value[:3]:
        # Fix: removed the duplicated comma in the error message.
        raise ValueError(f"Error on topic '{value}', unacceptable value.")
    return value


class KafkaRequestModel(RequirerCommonModel):
    """Specialised model for Kafka."""

    consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = (
        Field(default=None)
    )
class KafkaResponseModel(ResourceProviderModel):
    """Kafka response model."""

    consumer_group_prefix: ExtraSecretStr = Field(default=None)
    zookeeper_uris: ExtraSecretStr = Field(default=None)


##############################################################################
# AbstractRepository class
##############################################################################


class AbstractRepository(ABC):
    """Abstract repository interface.

    A repository abstracts reads and writes of plain fields and of Juju
    secrets for one databag (relation or peer).
    """

    @abstractmethod
    def get_secret(
        self, secret_group, secret_uri: str | None, short_uuid: str | None = None
    ) -> CachedSecret | None:
        """Gets a secret from the secret cache by uri or label."""
        ...

    @abstractmethod
    def get_secret_field(
        self,
        field: str,
        secret_group: SecretGroup,
        short_uuid: str | None = None,
    ) -> str | None:
        """Gets a value for a field stored in a secret group."""
        ...

    @abstractmethod
    def get_field(self, field: str) -> str | None:
        """Gets the value for one field."""
        ...

    @abstractmethod
    def get_fields(self, *fields: str) -> dict[str, str | None]:
        """Gets the values for all provided fields."""
        ...

    @abstractmethod
    def write_field(self, field: str, value: Any) -> None:
        """Writes the value in the field, without any secret support."""
        ...

    @abstractmethod
    def write_fields(self, mapping: dict[str, Any]) -> None:
        """Writes the values of mapping in the fields without any secret support (keys of mapping)."""
        ...

    # Fix: this was the only interface method missing @abstractmethod, so
    # subclasses were silently allowed to inherit a no-op implementation.
    @abstractmethod
    def write_secret_field(
        self, field: str, value: Any, group: SecretGroup
    ) -> CachedSecret | None:
        """Writes a secret field."""
        ...

    @abstractmethod
    def add_secret(
        self,
        field: str,
        value: Any,
        secret_group: SecretGroup,
        short_uuid: str | None = None,
    ) -> CachedSecret | None:
        """Gets a value for a field stored in a secret group."""
        ...

    @abstractmethod
    def delete_secret(self, label: str):
        """Deletes a secret by its label."""
        ...

    @abstractmethod
    def delete_field(self, field: str) -> None:
        """Deletes a field."""
        ...

    @abstractmethod
    def delete_fields(self, *fields: str) -> None:
        """Deletes all the provided fields."""
        ...

    @abstractmethod
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        """Delete a field stored in a secret group."""
        ...

    @abstractmethod
    def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None:
        """Registers a secret using the repository.

        NOTE(review): OpsRepository implements this with an extra leading
        `uri` parameter — reconcile the two signatures.
        """
        ...

    @abstractmethod
    def get_data(self) -> dict[str, Any] | None:
        """Gets the whole data."""
        ...

    @abstractmethod
    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Builds a secret field."""


class OpsRepository(AbstractRepository):
    """Implementation for ops repositories, with some methods left out."""

    # Prefix used for secret-uri fields in the databag; set by subclasses.
    SECRET_FIELD_NAME: str

    # Secret groups this repository refuses to serve.
    IGNORES_GROUPS: list[SecretGroup] = []

    # Whether secret URIs are mirrored into the databag (relation data) or
    # resolved purely via labels (peer data).
    uri_to_databag: bool = True

    def __init__(
        self,
        model: Model,
        relation: Relation | None,
        component: Unit | Application,
    ):
        self._local_app = model.app
        self._local_unit = model.unit
        self.relation = relation
        self.component = component
        self.model = model
        self.secrets = SecretCache(model, component)

    @abstractmethod
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group mapping for secrets within a relation context."""
        ...
+ + @override + def get_data(self) -> dict[str, Any] | None: + ret: dict[str, Any] = {} + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + for key, value in self.relation.data[self.component].items(): + try: + ret[key] = json.loads(value) + except json.JSONDecodeError: + ret[key] = value + + return ret + + @override + @ensure_leader_for_app + def get_field( + self, + field: str, + ) -> str | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + return relation_data.get(field) + + @override + @ensure_leader_for_app + def get_fields(self, *fields: str) -> dict[str, str]: + res = {} + for field in fields: + if (value := self.get_field(field)) is not None: + res[field] = value + return res + + @override + @ensure_leader_for_app + def write_field(self, field: str, value: Any) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if not value: + return None + self.relation.data[self.component].update({field: value}) + + @override + @ensure_leader_for_app + def write_fields(self, mapping: dict[str, Any]) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + (self.write_field(field, value) for field, value in mapping.items()) + + @override + @ensure_leader_for_app + def write_secret_field( + self, field: str, value: Any, 
secret_group: SecretGroup + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = self.get_field(self.secret_field(secret_group, field)) + + secret = self.secrets.get(label=label, uri=secret_uri) + if not secret: + return self.add_secret(field, value, secret_group) + else: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({field: value}) + secret.set_content(full_content) + return secret + + @override + @ensure_leader_for_app + def delete_field(self, field: str) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + try: + relation_data.pop(field) + except KeyError: + logger.debug( + f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" + ) + + @override + @ensure_leader_for_app + def delete_fields(self, *fields: str) -> None: + (self.delete_field(field) for field in fields) + + @override + @ensure_leader_for_app + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + relation_data = self.relation.data[self.component] + secret_field = self.secret_field(secret_group, field) + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = relation_data.get(secret_field) + + secret = 
self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f"from relation {self.relation.id} and group {secret_group}" + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. 
+ """ + if not self.relation: + raise ValueError("Cannot register without relation.") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> Any | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + @override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + return secret + + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + + +@final +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = "secret" + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" + return f"{relation.name}.{relation.id}.{secret_group}.secret" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f"{self.SECRET_FIELD_NAME}-{secret_group}" + + +class 
class OpsPeerRepository(OpsRepository):
    """Implementation of the Ops Repository for peer relations."""

    SECRET_FIELD_NAME = "internal_secret"

    # Groups that only make sense on client relations, never on peers.
    IGNORES_GROUPS = [
        SecretGroup("user"),
        SecretGroup("entity"),
        SecretGroup("mtls"),
        SecretGroup("tls"),
    ]

    # Peer secrets are resolved by label only; URIs are not mirrored in the databag.
    uri_to_databag: bool = False

    @property
    def scope(self) -> Scope:
        """Returns a scope.

        Raises:
            ValueError: if the component is neither a Unit nor an Application.
        """
        if isinstance(self.component, Application):
            return Scope.APP
        if isinstance(self.component, Unit):
            return Scope.UNIT
        raise ValueError("Invalid component, neither a Unit nor an Application")

    @override
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group_mappings for secrets within a relation context."""
        members = [relation.name, self._local_app.name, self.scope.value]

        # The "extra" group is the default bucket and is left out of the label.
        if secret_group != SecretGroup("extra"):
            members.append(secret_group)
        return f"{'.'.join(members)}"

    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Generates the field name to store in the peer relation.

        Raises:
            ValueError: if no field name is provided.
        """
        if not field:
            raise ValueError("Must have a field.")
        return f"{field}@{secret_group}"


@final
class OpsPeerUnitRepository(OpsPeerRepository):
    """Implementation for a unit."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        super().__init__(model, relation, component)


@final
class OpsOtherPeerUnitRepository(OpsPeerRepository):
    """Implementation for a remote unit.

    This view is read-only: Juju does not allow writing another unit's databag
    or secrets, hence every mutator raises NotImplementedError.
    """

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        if component == model.unit:
            raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.")
        super().__init__(model, relation, component)

    @override
    def write_field(self, field: str, value: Any) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def write_fields(self, mapping: dict[str, Any]) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def add_secret(
        self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> CachedSecret | None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_field(self, field: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_fields(self, *fields: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")


TRepository = TypeVar("TRepository", bound=OpsRepository)
TCommon = TypeVar("TCommon", bound=BaseModel)
TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel)
TCommonBis = TypeVar("TCommonBis", bound=BaseModel)


class RepositoryInterface(Generic[TRepository, TCommon]):
    """Repository builder."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        component: Unit | Application,
        repository_type: type[TRepository],
        model: type[TCommon] | TypeAdapter | None,
    ):
        self.charm = charm
        self._model = charm.model
        self.repository_type = repository_type
        self.relation_name = relation_name
        self.model = model
        self.component = component

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this relation name."""
        return self._model.relations[self.relation_name]

    def repository(
        self, relation_id: int, component: Unit | Application | None = None
    ) -> TRepository:
        """Returns a repository for the relation.

        Raises:
            ValueError: if the relation cannot be found.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return self.repository_type(self._model, relation, 
component or self.component) + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommon], + component: Unit | Application | None = None, + ) -> TCommon: ... + + @overload + def build_model( + self, + relation_id: int, + model: TypeAdapter[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: None = None, + component: Unit | Application | None = None, + ) -> TCommon: ... + + def build_model( + self, + relation_id: int, + model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, + component: Unit | Application | None = None, + ) -> TCommon | TCommonBis: + """Builds a model using the repository for that relation.""" + model = model or self.model # First the provided model (allows for specialisation) + component = component or self.component + if not model: + raise ValueError("Missing model to specialise data") + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return build_model(self.repository_type(self._model, relation, component), model) + + def write_model( + self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None + ): + """Writes the model using the repository.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + + write_model( + self.repository_type(self._model, relation, self.component), model, context=context + ) + + +class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TCommon] | TypeAdapter | None = 
None, + ): + super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) + + +class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) + + +class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): + """Specialised Interface to build repositories for this unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) + + +class OpsOtherPeerUnitRepositoryInterface( + RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] +): + """Specialised Interface to build repositories for another unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + unit: Unit, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) + + +############################################################################## +# DDD implementation methods +############################################################################## +############################################################################## + + +def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: + """Builds a common model using the provided repository and provided model structure.""" + data = repository.get_data() or {} + + data.pop("data", None) + + # Beware this means all fields should have a default value here. 
+ if isinstance(model, TypeAdapter): + return model.validate_python(data, context={"repository": repository}) + + return model.model_validate(data, context={"repository": repository}) + + +def write_model( + repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None +): + """Writes the data stored in the model using the repository object.""" + context = context or {} + dumped = model.model_dump( + mode="json", context={"repository": repository} | context, exclude_none=False + ) + for field, value in dumped.items(): + if value is None: + repository.delete_field(field) + continue + dumped_value = value if isinstance(value, str) else json.dumps(value) + repository.write_field(field, dumped_value) + + +############################################################################## +# Custom Events +############################################################################## + + +class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.request = request + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+        snapshot["request"] = pickle.dumps(self.request)
+        return snapshot
+
+    def restore(self, snapshot: dict[str, Any]):
+        """Restore event information."""
+        relation = self.framework.model.get_relation(
+            snapshot["relation_name"], snapshot["relation_id"]
+        )
+        if not relation:
+            raise ValueError("Missing relation")
+        self.relation = relation
+        self.app = None
+        app_name = snapshot.get("app_name")
+        if app_name:
+            self.app = self.framework.model.get_app(app_name)
+        self.unit = None
+        unit_name = snapshot.get("unit_name")
+        if unit_name:
+            # Restore the unit (not the app): the snapshot stores a unit name here.
+            self.unit = self.framework.model.get_unit(unit_name)
+        self.request = pickle.loads(snapshot["request"])
+
+
+class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """Resource requested event."""
+
+    pass
+
+
+class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """Resource Entity requested event."""
+
+    pass
+
+
+class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """Resource entity permissions changed event."""
+
+    pass
+
+
+class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]):
+    """MTLS certificate updated event."""
+
+    def __init__(
+        self,
+        handle: Handle,
+        relation: Relation,
+        app: Application | None,
+        unit: Unit | None,
+        request: TRequirerCommonModel,
+        old_mtls_cert: str | None = None,
+    ):
+        super().__init__(handle, relation, app, unit, request)
+
+        self.old_mtls_cert = old_mtls_cert
+
+    def snapshot(self):
+        """Return a snapshot of the event."""
+        return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert}
+
+    def restore(self, snapshot):
+        """Restore the event from a snapshot."""
+        super().restore(snapshot)
+        self.old_mtls_cert = snapshot["old_mtls_cert"]
+
+
+class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]):
+    """Resource requested event.
+
+    Contains the request that should be handled.
+
+    fields to serialize: relation, app, unit, request
+    """
+
+    def __init__(
+        self,
+        handle: Handle,
+        relation: Relation,
+        app: Application | None,
+        unit: Unit | None,
+        requests: list[TRequirerCommonModel],
+    ):
+        super().__init__(handle)
+        self.relation = relation
+        self.app = app
+        self.unit = unit
+        self.requests = requests
+
+    def snapshot(self) -> dict[str, Any]:
+        """Save the event information."""
+        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
+        if self.app:
+            snapshot["app_name"] = self.app.name
+        if self.unit:
+            snapshot["unit_name"] = self.unit.name
+        # The models are too complex and would be blocked by marshal so we pickle dump the model.
+        # The full dictionary is pickled afterwards anyway.
+        snapshot["requests"] = [pickle.dumps(request) for request in self.requests]
+        return snapshot
+
+    def restore(self, snapshot: dict[str, Any]):
+        """Restore event information."""
+        relation = self.framework.model.get_relation(
+            snapshot["relation_name"], snapshot["relation_id"]
+        )
+        if not relation:
+            raise ValueError("Missing relation")
+        self.relation = relation
+        self.app = None
+        app_name = snapshot.get("app_name")
+        if app_name:
+            self.app = self.framework.model.get_app(app_name)
+        self.unit = None
+        unit_name = snapshot.get("unit_name")
+        if unit_name:
+            # Restore the unit (not the app): the snapshot stores a unit name here.
+            self.unit = self.framework.model.get_unit(unit_name)
+        self.requests = [pickle.loads(request) for request in snapshot["requests"]]
+
+
+class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]):
+    """Database events.
+
+    This class defines the events that the database can emit.
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+        snapshot["response"] = pickle.dumps(self.response)
+        return snapshot
+
+    def restore(self, snapshot: dict):
+        """Restore event information."""
+        relation = self.framework.model.get_relation(
+            snapshot["relation_name"], snapshot["relation_id"]
+        )
+        if not relation:
+            raise ValueError("Missing relation")
+        self.relation = relation
+        self.app = None
+        app_name = snapshot.get("app_name")
+        if app_name:
+            self.app = self.framework.model.get_app(app_name)
+        self.unit = None
+        unit_name = snapshot.get("unit_name")
+        if unit_name:
+            # Restore the unit (not the app): the snapshot stores a unit name here.
+            self.unit = self.framework.model.get_unit(unit_name)
+
+        self.response = pickle.loads(snapshot["response"])
+
+
+class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Resource has been created."""
+
+    pass
+
+
+class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Resource entity has been created."""
+
+    pass
+
+
+class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Read/Write endpoints are changed."""
+
+    pass
+
+
+class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Read-only endpoints are changed."""
+
+    pass
+
+
+class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
+    """Authentication was updated for a user."""
+
+    pass
+
+
+class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]):
+    """Database events.
+
+    This class defines the events that the database can emit.
+ """ + + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _handle_event( + self, + ): + """Handles 
the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, "data") + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode="json", + exclude={"data"}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data(relation, self.component, new_data, short_uuid=request.request_id) + + return _diff + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = "", + mtls_enabled: bool = False, + bulk_event: bool = False, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. + bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in _diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): + if self.mtls_enabled and "secret-mtls" in _diff.added: + getattr(self.on, "mtls_cert_updated").emit( + event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None + ) + return + # Emit a resource requested event if the setup key (resource name) + # was added to the relation databag, but the entity-type key was not. + if resource_added(_diff) and "entity-type" not in _diff.added: + getattr(self.on, "resource_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (resource name) + # was added to the relation databag, in addition to the entity-type key. + if resource_added(_diff) and "entity-type" in _diff.added: + getattr(self.on, "resource_entity_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (resource name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + not resource_added(_diff) + and "entity-type" not in _diff.added + and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) + ): + getattr(self.on, "resource_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit, request=request + ) + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request: RequirerCommonModel, + ): + _diff = self.compute_diff(event.relation, request, repository) + + self._validate_diff(event, _diff) + self._dispatch_events(event, _diff, request) + + def _handle_bulk_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request_model: RequirerDataContractV1[TRequirerCommonModel], + ): + """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. + + This allows for the developer to process the diff and store it themselves + """ + for request in request_model.requests: + # Compute the diff without storing it so we can validate the diffs. + _diff = self.compute_diff(event.relation, request, repository, store=False) + self._validate_diff(event, _diff) + + getattr(self.on, "bulk_resources_requested").emit( + event.relation, app=event.app, unit=event.unit, requests=request_model.requests + ) + + # Store all the diffs if they were not already stored. 
+ for request in request_model.requests: + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info("MTLS is disabled, exiting early.") + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + logger.info("mtls-cert-updated") + + # V0, just fire the event. + if version == "v0": + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. 
+        else:
+            request_model = build_model(repository, RequirerDataContractV1[self.request_model])
+            if not short_uuid:
+                return
+            for _request in request_model.requests:
+                if _request.request_id == short_uuid:
+                    request = _request
+                    break
+            else:
+                logger.info(f"Unknown request id {short_uuid}")
+                return
+
+        # MtlsCertUpdatedEvent's constructor takes `old_mtls_cert`, not `mtls_cert`;
+        # using the wrong keyword raised a TypeError at emit time.
+        getattr(self.on, "mtls_cert_updated").emit(
+            relation,
+            app=relation.app,
+            unit=remote_unit,
+            request=request,
+            old_mtls_cert=old_mtls_cert,
+        )
+
+    @override
+    def _on_relation_changed_event(self, event: RelationChangedEvent):
+        if not self.charm.unit.is_leader():
+            return
+
+        repository = OpsRelationRepository(
+            self.model, event.relation, component=event.relation.app
+        )
+
+        # Don't do anything until we get some data
+        if not repository.get_data():
+            return
+
+        version = repository.get_field("version") or "v0"
+        if version == "v0":
+            request_model = build_model(repository, RequirerDataContractV0)
+            old_name = request_model.original_field
+            request_model.request_id = None  # For safety, let's ensure that we don't have a model.
+            self._handle_event(event, repository, request_model)
+            logger.info(
+                f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'"
+            )
+            self.interface.repository(
+                event.relation.id,
+            ).write_field(old_name, request_model.resource)
+        else:
+            request_model = build_model(repository, RequirerDataContractV1[self.request_model])
+            if self.bulk_event:
+                self._handle_bulk_event(event, repository, request_model)
+                return
+            for request in request_model.requests:
+                self._handle_event(event, repository, request)
+
+    def set_response(self, relation_id: int, response: ResourceProviderModel):
+        r"""Sets a response in the databag.
+
+        This function will react accordingly to the version number.
+        If the version number is v0, then we write the data directly in the databag.
+        If the version number is v1, then we write the data in the list of responses.
+
+        /!\ This function updates a response if it was already present in the databag!
+ + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. + for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index] = response + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = "", + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + ) + + if 
requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" + ) + + # Created custom event names for each alias. + if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f"{relation_alias}_resource_created", + ResourceCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_resource_entity_created", + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). 
+ """ + if not model: + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + for request in model.requests: + if request.request_id == request_id: + return request.secret_user is not None or request.secret_entity is not None + return False + + def are_all_resources_created(self, rel_id: int) -> bool: + """Checks that all resources have been created for a relation. + + Args: + rel_id: The relation id to check. + """ + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + return all( + self.is_resource_created(rel_id, request.request_id, model) + for request in model.requests + if request.request_id + ) + + @staticmethod + def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: + # Actual checking method. + # No need to check for psycopg here, it's been checked before. + if not psycopg2: + return False + + try: + with psycopg2.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg2.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", + str(e), + ) + return False + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: Optional index to check the database (default: 0 - first relation). + """ + if not psycopg2: + return False + + # Can't check a non existing relation. 
+ if len(self.relations) <= relation_index: + return False + + relation = self.relations[relation_index] + model = self.interface.build_model(relation_id=relation.id, component=relation.app) + for request in model.requests: + if request.endpoints and request.username and request.password: + host = request.endpoints.split(":")[0] + username = request.username.get_secret_value() + password = request.password.get_secret_value() + + connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" + return self._is_pg_plugin_enabled(plugin, connection_string) + logger.info("No valid request to use to check for plugin.") + return False + + ############################################################################## + # Helpers for aliases + ############################################################################## + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation and relation.data[self.charm.unit].get("alias"): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). + available_aliases = self.relation_aliases[:] + for relation in self.charm.model.relations[self.relation_name]: + alias = relation.data[self.charm.unit].get("alias") + if alias: + logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) + available_aliases.remove(alias) + + # Set the alias in the unit relation databag of the specific relation. 
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({"alias": available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get("alias") + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = 
unit + break + + response_model = self.interface.build_model(relation.id) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "authentication_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. + for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version="v1", requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info("Still waiting for data.") + 
return + + data = repository.get_field("data") + if not data: + logger.info("Missing data to compute diffs") + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f"No request matching the response with response_id {response_id}" + ) + self._handle_event(event, repository, request, response) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace("-", "_")) + repository.register_secret(uri, secret_group, response.request_id) + + if "secret-user" in _diff.added and not request.entity_type: + logger.info(f"resource {response.resource} created at {datetime.now()}") + getattr(self.on, "resource_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_created", response) + return + + if "secret-entity" in _diff.added and request.entity_type: + logger.info(f"entity {response.entity_name} created at {datetime.now()}") + getattr(self.on, "resource_entity_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_entity_created", response) + return + + if "endpoints" in _diff.added or "endpoints" in _diff.changed: + logger.info(f"endpoints 
changed at {datetime.now()}") + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "endpoints_changed", response) + return + + if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: + logger.info(f"read-only-endpoints changed at {datetime.now()}") + getattr(self.on, "read_only_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "read_only_endpoints_changed", response) + return diff --git a/tests/v1/integration/kafka-connect-charm/charmcraft.yaml b/tests/v1/integration/kafka-connect-charm/charmcraft.yaml index b4317391..ad2c056e 100644 --- a/tests/v1/integration/kafka-connect-charm/charmcraft.yaml +++ b/tests/v1/integration/kafka-connect-charm/charmcraft.yaml @@ -2,12 +2,11 @@ # See LICENSE file for licensing details. type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -59,14 +58,7 @@ parts: - libssl-dev # Needed to build Python dependencies with Rust from source - pkg-config # Needed to build Python dependencies with Rust from source override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the 
Ubuntu # archive—which means the rustup version could be updated at any time. Print rustup version diff --git a/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py new file mode 100644 index 00000000..e22388d1 --- /dev/null +++ b/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -0,0 +1,2753 @@ +# Copyright 2025 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. + +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. 
The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. + + +#### Requirer Charm + +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +```python +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ClientCharm(CharmBase): + # Database charm that accepts connections from application charms. + def __init__(self, *args) -> None: + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + RequirerCommonModel( + entity_type="USER", + ) + ] + self.database = ResourceRequirerEventHandler( + self,"database", requests, response_model=ResourceProviderModel + ) + self.framework.observe(self.database.on.resource_created, self._on_resource_created) + self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) + + def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new database is created. + relation_id = event.relation.id + response = event.response # This is the response model + + username = event.response.username + password = event.response.password + ... + + def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new entity is created. + ... 
+
+Compared to V0, this library makes heavy use of pydantic models, and allows for
+multiple requests, specified as a list.
+On the Requirer side, each response will trigger one custom event for that response.
+This way, it allows for more strategic events to be emitted according to the request.
+
+As shown above, the library provides some custom events to handle specific situations, which are listed below:
+- resource_created: event emitted when the requested database is created.
+- resource_entity_created: event emitted when the requested entity is created.
+- endpoints_changed: event emitted when the read/write endpoints of the database have changed.
+- read_only_endpoints_changed: event emitted when the read-only endpoints of the database
+  have changed. Event is not triggered if read/write endpoints changed too.
+
+If it is needed to connect multiple database clusters to the same relation endpoint
+the application charm can implement the same code as if it would connect to only
+one database cluster (like the above code example).
+
+To differentiate multiple clusters connected to the same relation endpoint
+the application charm can use the name of the remote application:
+
+```python
+
+def _on_resource_created(self, event: ResourceCreatedEvent) -> None:
+    # Get the remote app name of the cluster that triggered this event
+    cluster = event.relation.app.name
+```
+
+It is also possible to provide an alias for each different database cluster/relation.
+
+So, it is possible to differentiate the clusters in two ways.
+The first is to use the remote application name, i.e., `event.relation.app.name`, as above.
+
+The second way is to use different event handlers to handle each cluster's events.
+The implementation would be something like the following code:
+
+```python
+
+from charms.data_platform_libs.v1.data_interfaces import (
+    RequirerCommonModel,
+    RequirerDataContractV1,
+    ResourceCreatedEvent,
+    ResourceEntityCreatedEvent,
+    ResourceProviderModel,
+    ResourceRequirerEventHandler,
+)
+
+class ApplicationCharm(CharmBase):
+    # Application charm that connects to database charms.
+
+    def __init__(self, *args):
+        super().__init__(*args)
+
+        requests = [
+            RequirerCommonModel(
+                resource="clientdb",
+            ),
+            RequirerCommonModel(
+                resource="clientbis",
+            ),
+        ]
+        # Define the cluster aliases and one handler for each cluster database created event.
+        self.database = ResourceRequirerEventHandler(
+            self,
+            relation_name="database",
+            relations_aliases=["cluster1", "cluster2"],
+            requests=requests,
+        )
+        self.framework.observe(
+            self.database.on.cluster1_resource_created, self._on_cluster1_resource_created
+        )
+        self.framework.observe(
+            self.database.on.cluster2_resource_created, self._on_cluster2_resource_created
+        )
+
+    def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster1
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+
+    def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster2
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+```
+
+### Provider Charm
+
+Following is an example of using the DatabaseRequestedEvent, in the context of the
+database charm code:
+
+```python
+from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides
+
+class SampleCharm(CharmBase):
+
+    def __init__(self, *args):
+        super().__init__(*args)
+        # Charm events defined in the database provides charm library.
+        self.provided_database = DatabaseProvides(self, relation_name="database")
+        self.framework.observe(self.provided_database.on.database_requested,
+            self._on_database_requested)
+        # Database generic helper
+        self.database = DatabaseHelper()
+
+    def _on_database_requested(self, event: DatabaseRequestedEvent) -> None:
+        # Handle the event triggered by a new database requested in the relation
+        # Retrieve the database name using the charm library.
+        db_name = event.database
+        # generate a new user credential
+        username = self.database.generate_user()
+        password = self.database.generate_password()
+        # set the credentials for the relation
+        self.provided_database.set_credentials(event.relation.id, username, password)
+        # set other variables for the relation
+        event.set_tls("False")
+```
+
+As shown above, the library provides a custom event (database_requested) to handle
+the situation when an application charm requests a new database to be created.
+It's preferred to subscribe to this event instead of the relation changed event to avoid
+creating a new database when information other than a database name is
+exchanged in the relation databag.
+ +""" + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections import namedtuple +from datetime import datetime +from enum import Enum +from typing import ( + Annotated, + Any, + ClassVar, + Generic, + Literal, + NewType, + TypeAlias, + TypeVar, + final, + overload, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SecretStr, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) +from pydantic.types import _SecretBase, _SecretField +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeAliasType, override + +try: + import psycopg2 +except ImportError: + psycopg2 = None + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 0 + +PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + +RESOURCE_ALIASES = [ + "database", + "subject", + "topic", + "index", + "plugin-url", +] + +SECRET_PREFIX = "secret-" + + 
+############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + + +def gen_salt() -> str: + """Generates a consistent salt.""" + return "".join(random.choices(string.ascii_letters + string.digits, k=16)) + + +def gen_hash(resource_name: str, salt: str) -> str: + """Generates a consistent hash based on the resource name and salt.""" + hasher = hashlib.sha256() + hasher.update(f"{resource_name}:{salt}".encode()) + return hasher.hexdigest()[:16] + + +def ensure_leader_for_app(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self._local_app and not self._local_unit.is_leader(): + logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def get_encoded_dict( + relation: Relation, member: Unit | Application, field: str +) -> dict[str, Any] | None: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + 
logger.error("Unexpected datatype for %s instead of dict.", str(data)) + + +Diff = namedtuple("Diff", ["added", "changed", "deleted"]) +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. + +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + + +def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: + """Retrieves the diff of the data in the relation changed databag for v1. + + Args: + old_data: dictionary of the stored data before the event. + new_data: dictionary of the received data to compute the diff. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + old_data = old_data or {} + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + # Return the diff with all possible changes. 
+ return Diff(added, changed, deleted) + + +def resource_added(diff: Diff) -> bool: + """Ensures that one of the aliased resources has been added.""" + return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) + + +def store_new_data( + relation: Relation, + component: Unit | Application, + new_data: dict[str, str], + short_uuid: str | None = None, +): + """Stores the new data in the databag for diff computation.""" + # First, the case for V0 + if not short_uuid: + relation.data[component].update({"data": json.dumps(new_data)}) + # Then the case for V1, where we have a ShortUUID + else: + data = json.loads(relation.data[component].get("data", "{}")) + if not isinstance(data, dict): + raise ValueError + newest_data = copy.deepcopy(data) + newest_data[short_uuid] = new_data + relation.data[component].update({"data": json.dumps(newest_data)}) + + +############################################################################## +# Helper classes +############################################################################## + +SecretGroup = NewType("SecretGroup", str) + + +SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) + + +class SecretBool(_SecretField[bool]): + """Class for booleans as secrets.""" + + _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() + _error_kind: ClassVar[str] = "bool_type" + + def _display(self) -> str: + return "****" + + +OptionalSecretStr: TypeAlias = SecretStr | None +OptionalSecretBool: TypeAlias = SecretBool | None + +OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) + +UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] +TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] +TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] +MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] +ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] +EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] + + +class Scope(Enum): + """Peer relations scope.""" + + APP = "app" + UNIT = "unit" + + +class CachedSecret: + """Locally cache a secret. + + The data structure is precisely reusing/simulating as in the actual Secret Storage + """ + + KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + + def __init__( + self, + model: Model, + component: Application | Unit, + label: str, + secret_uri: str | None = None, + ): + self._secret_meta = None + self._secret_content = {} + self._secret_uri = secret_uri + self.label = label + self._model = model + self.component = component + self.current_label = None + + @property + def meta(self) -> Secret | None: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + logger.info(f"Secret with label {self.label} not found") + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + + ########################################################################## + # Public functions + ########################################################################## + + def add_secret( + self, + content: dict[str, str], + relation: Relation | None = None, + label: str | None = None, + ) -> Secret: + """Create a new secret.""" + if self._secret_uri: + raise SecretAlreadyExistsError( + "Secret is already defined with uri %s", self._secret_uri + ) + + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if 
relation and relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) + self._secret_uri = secret.id + self._secret_meta = secret + return self._secret_meta + + def get_content(self) -> dict[str, str]: + """Getting cached secret content.""" + if not self._secret_content: + if self.meta: + try: + self._secret_content = self.meta.get_content(refresh=True) + except (ValueError, ModelError) as err: + # https://bugs.launchpad.net/juju/+bug/2042596 + # Only triggered when 'refresh' is set + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + ): + raise + # Due to: ValueError: Secret owner cannot use refresh=True + self._secret_content = self.meta.get_content() + return self._secret_content + + def set_content(self, content: dict[str, str]) -> None: + """Setting cached secret content.""" + if not self.meta: + return + + if content == self.get_content(): + return + + if content: + self.meta.set_content(content) + self._secret_content = content + else: + self.meta.remove_all_revisions() + + def get_info(self) -> SecretInfo | None: + """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" + if self.meta: + return self.meta.get_info() + + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + + +class SecretCache: + """A data structure storing CachedSecret objects.""" + + def __init__(self, model: Model, component: Application | Unit): + self._model = model + self.component = component + self._secrets: dict[str, CachedSecret] = {} + + def get(self, label: str, uri: str | None = None) -> CachedSecret | None: + """Getting a secret from Juju Secret store or cache.""" + if 
not self._secrets.get(label): + secret = CachedSecret(self._model, self.component, label, uri) + if secret.meta: + self._secrets[label] = secret + return self._secrets.get(label) + + def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: + """Adding a secret to Juju Secret.""" + if self._secrets.get(label): + raise SecretAlreadyExistsError(f"Secret {label} already exists") + + secret = CachedSecret(self._model, self.component, label) + secret.add_secret(content, relation) + self._secrets[label] = secret + return self._secrets[label] + + def remove(self, label: str) -> None: + """Remove a secret from the cache.""" + if secret := self.get(label): + try: + secret.remove() + self._secrets.pop(label) + except (SecretsUnavailableError, KeyError): + pass + else: + return + logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) + + +############################################################################## +# Models classes +############################################################################## + + +class PeerModel(BaseModel): + """Common Model for all peer relations.""" + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + extra="allow", + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + 
aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + if not secret: + logger.info(f"No secret for group {secret_group}") + continue + + value = secret.get_content().get(aliased_field) + + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + elif value: + value = SecretStr(value) + setattr(self, field, value) + + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + value = getattr(self, field) + + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, + actual_value, + secret_group, + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + else: + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + return handler(self) + + 
+class CommonModel(BaseModel): + """Common Model for both requirer and provider. + + request_id stores the request identifier for easier access. + resource is the requested resource. + """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + extra="allow", + ) + + resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") + request_id: str | None = Field(default=None) + salt: str = Field( + description="This salt is used to create unique hashes even when other fields map 1-1", + default_factory=gen_salt, + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + + if not secret_uri: + continue + + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + if not secret: + logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") + continue + + value = secret.get_content().get(aliased_field) + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + elif value: + 
value = SecretStr(value) + + setattr(self, field, value) + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + _encountered_secrets: set[tuple[CachedSecret, str]] = set() + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + # Backward compatibility for v0 regarding secrets. + if info.context.get("version") == "v0": + short_uuid = None + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + value = getattr(self, field) + + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, actual_value, secret_group, short_uuid + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + setattr(self, secret_field, secret.meta.id) + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + 
_encountered_secrets.add((secret, secret_field)) + else: + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + + # Delete all empty secrets and clean up their fields. + for secret, secret_field in _encountered_secrets: + if not secret.get_content(): + # Setting a field to '' deletes it + setattr(self, secret_field, "") + repository.delete_secret(secret.label) + + return handler(self) + + @classmethod + def _get_secret_field(cls, field: str) -> SecretGroup | None: + """Checks if the field is a secret uri or not.""" + if not field.startswith(SECRET_PREFIX): + return None + + value = field.split("-")[1] + if info := cls.__pydantic_fields__.get(field.replace("-", "_")): + if info.annotation == SecretString: + return SecretGroup(value) + return None + + +class EntityPermissionModel(BaseModel): + """Entity Permissions Model.""" + + resource_name: str + resource_type: str + privileges: list + + +class RequirerCommonModel(CommonModel): + """Requirer side of the request model. + + extra_user_roles is used to request more roles for that user. + external_node_connectivity is used to indicate that the URI should be made for external clients when True + """ + + extra_user_roles: str | None = Field(default=None) + extra_group_roles: str | None = Field(default=None) + external_node_connectivity: bool = Field(default=False) + entity_type: Literal["USER", "GROUP"] | None = Field(default=None) + entity_permissions: list[EntityPermissionModel] | None = Field(default=None) + secret_mtls: SecretString | None = Field(default=None) + mtls_cert: MtlsSecretStr = Field(default=None) + + @model_validator(mode="after") + def validate_fields(self): + """Validates that no inconsistent request is being sent.""" + if self.entity_type and self.entity_type not in ["USER", "GROUP"]: + raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") + + if self.entity_type == "USER" and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == "GROUP" and self.extra_user_roles: + raise ValueError("Inconsistent entity information. Use extra_group_roles instead") + + return self + + +class ProviderCommonModel(CommonModel): + """Serialized fields added to the databag. + + endpoints stores the endpoints exposed to that client. + secret_user is a secret URI mapping to the user credentials + secret_tls is a secret URI mapping to the TLS certificate + secret_extra is a secret URI for all additional secrets requested. + """ + + endpoints: str | None = Field(default=None) + read_only_endpoints: str | None = Field(default=None) + secret_user: SecretString | None = Field(default=None) + secret_tls: SecretString | None = Field(default=None) + secret_extra: SecretString | None = Field(default=None) + secret_entity: SecretString | None = Field(default=None) + + +class ResourceProviderModel(ProviderCommonModel): + """Extended model including the deserialized fields.""" + + username: UserSecretStr = Field(default=None) + password: UserSecretStr = Field(default=None) + uris: UserSecretStr = Field(default=None) + read_only_uris: UserSecretStr = Field(default=None) + tls: TlsSecretBool = Field(default=None) + tls_ca: TlsSecretStr = Field(default=None) + entity_name: EntitySecretStr = Field(default=None) + entity_password: EntitySecretStr = Field(default=None) + version: str | None = Field(default=None) + + +class RequirerDataContractV0(RequirerCommonModel): + """Backward compatibility.""" + + version: Literal["v0"] = Field(default="v0") + + original_field: str = Field(exclude=True, default="") + + @model_validator(mode="before") + @classmethod + def ensure_original_field(cls, data: Any): + """Ensures that we keep the original field.""" + if isinstance(data, dict): + for alias in RESOURCE_ALIASES: + if data.get(alias) 
is not None: + data["original_field"] = alias + break + else: + for alias in RESOURCE_ALIASES: + if getattr(data, alias) is not None: + data.original_field = alias + return data + + +TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) +TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) + + +class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): + """The new Data Contract.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TRequirerCommonModel] + + +def discriminate_on_version(payload: Any) -> str: + """Use the version to discriminate.""" + if isinstance(payload, dict): + return payload.get("version", "v0") + return getattr(payload, "version", "v0") + + +RequirerDataContractType = Annotated[ + Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], + Discriminator(discriminate_on_version), +] + + +RequirerDataContract = TypeAdapter(RequirerDataContractType) + + +class DataContractV0(ResourceProviderModel): + """The Data contract of the response, for V0.""" + + +class DataContractV1(BaseModel, Generic[TResourceProviderModel]): + """The Data contract of the response, for V1.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TResourceProviderModel] = Field(default_factory=list) + + +DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) + + +TCommonModel = TypeVar("TCommonModel", bound=CommonModel) + + +def is_topic_value_acceptable(value: str | None) -> str | None: + """Check whether the given Kafka topic value is acceptable.""" + if value and "*" in value[:3]: + raise ValueError(f"Error on topic '{value}',, unacceptable value.") + return value + + +class KafkaRequestModel(RequirerCommonModel): + """Specialised model for Kafka.""" + + consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( + Field(default=None) + ) + + +class KafkaResponseModel(ResourceProviderModel): 
+ """Kafka response model.""" + + consumer_group_prefix: ExtraSecretStr = Field(default=None) + zookeeper_uris: ExtraSecretStr = Field(default=None) + + +############################################################################## +# AbstractRepository class +############################################################################## + + +class AbstractRepository(ABC): + """Abstract repository interface.""" + + @abstractmethod + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + ... + + @abstractmethod + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def get_field(self, field: str) -> str | None: + """Gets the value for one field.""" + ... + + @abstractmethod + def get_fields(self, *fields: str) -> dict[str, str | None]: + """Gets the values for all provided fields.""" + ... + + @abstractmethod + def write_field(self, field: str, value: Any) -> None: + """Writes the value in the field, without any secret support.""" + ... + + @abstractmethod + def write_fields(self, mapping: dict[str, Any]) -> None: + """Writes the values of mapping in the fields without any secret support (keys of mapping).""" + ... + + def write_secret_field( + self, field: str, value: Any, group: SecretGroup + ) -> CachedSecret | None: + """Writes a secret field.""" + ... + + @abstractmethod + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def delete_secret(self, label: str): + """Deletes a secret by its label.""" + ... + + @abstractmethod + def delete_field(self, field: str) -> None: + """Deletes a field.""" + ... 
+ + @abstractmethod + def delete_fields(self, *fields: str) -> None: + """Deletes all the provided fields.""" + ... + + @abstractmethod + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + """Delete a field stored in a secret group.""" + ... + + @abstractmethod + def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: + """Registers a secret using the repository.""" + ... + + @abstractmethod + def get_data(self) -> dict[str, Any] | None: + """Gets the whole data.""" + ... + + @abstractmethod + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Builds a secret field.""" + + +class OpsRepository(AbstractRepository): + """Implementation for ops repositories, with some methods left out.""" + + SECRET_FIELD_NAME: str + + IGNORES_GROUPS: list[SecretGroup] = [] + + uri_to_databag: bool = True + + def __init__( + self, + model: Model, + relation: Relation | None, + component: Unit | Application, + ): + self._local_app = model.app + self._local_unit = model.unit + self.relation = relation + self.component = component + self.model = model + self.secrets = SecretCache(model, component) + + @abstractmethod + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group mapping for secrets within a relation context.""" + ... 
+ + @override + def get_data(self) -> dict[str, Any] | None: + ret: dict[str, Any] = {} + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + for key, value in self.relation.data[self.component].items(): + try: + ret[key] = json.loads(value) + except json.JSONDecodeError: + ret[key] = value + + return ret + + @override + @ensure_leader_for_app + def get_field( + self, + field: str, + ) -> str | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + return relation_data.get(field) + + @override + @ensure_leader_for_app + def get_fields(self, *fields: str) -> dict[str, str]: + res = {} + for field in fields: + if (value := self.get_field(field)) is not None: + res[field] = value + return res + + @override + @ensure_leader_for_app + def write_field(self, field: str, value: Any) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if not value: + return None + self.relation.data[self.component].update({field: value}) + + @override + @ensure_leader_for_app + def write_fields(self, mapping: dict[str, Any]) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + (self.write_field(field, value) for field, value in mapping.items()) + + @override + @ensure_leader_for_app + def write_secret_field( + self, field: str, value: Any, 
secret_group: SecretGroup + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = self.get_field(self.secret_field(secret_group, field)) + + secret = self.secrets.get(label=label, uri=secret_uri) + if not secret: + return self.add_secret(field, value, secret_group) + else: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({field: value}) + secret.set_content(full_content) + return secret + + @override + @ensure_leader_for_app + def delete_field(self, field: str) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + try: + relation_data.pop(field) + except KeyError: + logger.debug( + f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" + ) + + @override + @ensure_leader_for_app + def delete_fields(self, *fields: str) -> None: + (self.delete_field(field) for field in fields) + + @override + @ensure_leader_for_app + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + relation_data = self.relation.data[self.component] + secret_field = self.secret_field(secret_group, field) + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = relation_data.get(secret_field) + + secret = 
self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f"from relation {self.relation.id} and group {secret_group}" + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. 
+ """ + if not self.relation: + raise ValueError("Cannot register without relation.") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> Any | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + @override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + return secret + + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + + +@final +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = "secret" + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" + return f"{relation.name}.{relation.id}.{secret_group}.secret" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f"{self.SECRET_FIELD_NAME}-{secret_group}" + + +class 
OpsPeerRepository(OpsRepository): + """Implementation of the Ops Repository for peer relations.""" + + SECRET_FIELD_NAME = "internal_secret" + + IGNORES_GROUPS = [ + SecretGroup("user"), + SecretGroup("entity"), + SecretGroup("mtls"), + SecretGroup("tls"), + ] + + uri_to_databag: bool = False + + @property + def scope(self) -> Scope: + """Returns a scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + raise ValueError("Invalid component, neither a Unit nor an Application") + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + members = [relation.name, self._local_app.name, self.scope.value] + + if secret_group != SecretGroup("extra"): + members.append(secret_group) + return f"{'.'.join(members)}" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + if not field: + raise ValueError("Must have a field.") + return f"{field}@{secret_group}" + + +@final +class OpsPeerUnitRepository(OpsPeerRepository): + """Implementation for a unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + super().__init__(model, relation, component) + + +@final +class OpsOtherPeerUnitRepository(OpsPeerRepository): + """Implementation for a remote unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + if component == model.unit: + raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.") + super().__init__(model, relation, component) + + @override + def write_field(self, field: str, value: Any) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def write_fields(self, mapping: 
dict[str, Any]) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def add_secret( + self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None + ) -> CachedSecret | None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_field(self, field: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_fields(self, *fields: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + +TRepository = TypeVar("TRepository", bound=OpsRepository) +TCommon = TypeVar("TCommon", bound=BaseModel) +TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel) +TCommonBis = TypeVar("TCommonBis", bound=BaseModel) + + +class RepositoryInterface(Generic[TRepository, TCommon]): + """Repository builder.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + component: Unit | Application, + repository_type: type[TRepository], + model: type[TCommon] | TypeAdapter | None, + ): + self.charm = charm + self._model = charm.model + self.repository_type = repository_type + self.relation_name = relation_name + self.model = model + self.component = component + + @property + def relations(self) -> list[Relation]: + """The list of Relation instances associated with this relation name.""" + return self._model.relations[self.relation_name] + + def repository( + self, relation_id: int, component: Unit | Application | None = None + ) -> TRepository: + """Returns a repository for the relation.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return self.repository_type(self._model, relation, 
component or self.component) + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommon], + component: Unit | Application | None = None, + ) -> TCommon: ... + + @overload + def build_model( + self, + relation_id: int, + model: TypeAdapter[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: None = None, + component: Unit | Application | None = None, + ) -> TCommon: ... + + def build_model( + self, + relation_id: int, + model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, + component: Unit | Application | None = None, + ) -> TCommon | TCommonBis: + """Builds a model using the repository for that relation.""" + model = model or self.model # First the provided model (allows for specialisation) + component = component or self.component + if not model: + raise ValueError("Missing model to specialise data") + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + return build_model(self.repository_type(self._model, relation, component), model) + + def write_model( + self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None + ): + """Writes the model using the repository.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError("Missing relation.") + + write_model( + self.repository_type(self._model, relation, self.component), model, context=context + ) + + +class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TCommon] | TypeAdapter | None = 
None, + ): + super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) + + +class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) + + +class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): + """Specialised Interface to build repositories for this unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) + + +class OpsOtherPeerUnitRepositoryInterface( + RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] +): + """Specialised Interface to build repositories for another unit peer relations.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + unit: Unit, + model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) + + +############################################################################## +# DDD implementation methods +############################################################################## +############################################################################## + + +def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: + """Builds a common model using the provided repository and provided model structure.""" + data = repository.get_data() or {} + + data.pop("data", None) + + # Beware this means all fields should have a default value here. 
+ if isinstance(model, TypeAdapter): + return model.validate_python(data, context={"repository": repository}) + + return model.model_validate(data, context={"repository": repository}) + + +def write_model( + repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None +): + """Writes the data stored in the model using the repository object.""" + context = context or {} + dumped = model.model_dump( + mode="json", context={"repository": repository} | context, exclude_none=False + ) + for field, value in dumped.items(): + if value is None: + repository.delete_field(field) + continue + dumped_value = value if isinstance(value, str) else json.dumps(value) + repository.write_field(field, dumped_value) + + +############################################################################## +# Custom Events +############################################################################## + + +class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.request = request + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["request"] = pickle.dumps(self.request) + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.request = pickle.loads(snapshot["request"]) + + +class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource requested event.""" + + pass + + +class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource Entity requested event.""" + + pass + + +class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + pass + + +class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + old_mtls_cert: str | None = None, + ): + super().__init__(handle, relation, app, unit, request) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. 
+ + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + requests: list[TRequirerCommonModel], + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.requests = requests + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. + snapshot["requests"] = [pickle.dumps(request) for request in self.requests] + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.requests = [pickle.loads(request) for request in snapshot["requests"]] + + +class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["response"] = pickle.dumps(self.response) + return snapshot + + def restore(self, snapshot: dict): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + + self.response = pickle.loads(snapshot["response"]) + + +class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource has been created.""" + + pass + + +class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource entity has been created.""" + + pass + + +class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read/Write enpoints are changed.""" + + pass + + +class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read-only enpoints are changed.""" + + pass + + +class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Authentication was updated for a user.""" + + pass + + +class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _handle_event( + self, + ): + """Handles 
the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, "data") + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode="json", + exclude={"data"}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data(relation, self.component, new_data, short_uuid=request.request_id) + + return _diff + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = "", + mtls_enabled: bool = False, + bulk_event: bool = False, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. + bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in _diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): + if self.mtls_enabled and "secret-mtls" in _diff.added: + getattr(self.on, "mtls_cert_updated").emit( + event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None + ) + return + # Emit a resource requested event if the setup key (resource name) + # was added to the relation databag, but the entity-type key was not. + if resource_added(_diff) and "entity-type" not in _diff.added: + getattr(self.on, "resource_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (resource name) + # was added to the relation databag, in addition to the entity-type key. + if resource_added(_diff) and "entity-type" in _diff.added: + getattr(self.on, "resource_entity_requested").emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (resource name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + not resource_added(_diff) + and "entity-type" not in _diff.added + and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) + ): + getattr(self.on, "resource_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit, request=request + ) + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request: RequirerCommonModel, + ): + _diff = self.compute_diff(event.relation, request, repository) + + self._validate_diff(event, _diff) + self._dispatch_events(event, _diff, request) + + def _handle_bulk_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request_model: RequirerDataContractV1[TRequirerCommonModel], + ): + """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. + + This allows for the developer to process the diff and store it themselves + """ + for request in request_model.requests: + # Compute the diff without storing it so we can validate the diffs. + _diff = self.compute_diff(event.relation, request, repository, store=False) + self._validate_diff(event, _diff) + + getattr(self.on, "bulk_resources_requested").emit( + event.relation, app=event.app, unit=event.unit, requests=request_model.requests + ) + + # Store all the diffs if they were not already stored. 
+ for request in request_model.requests: + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info("MTLS is disabled, exiting early.") + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + logger.info("mtls-cert-updated") + + # V0, just fire the event. + if version == "v0": + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. 
+ else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if not short_uuid: + return + for _request in request_model.requests: + if _request.request_id == short_uuid: + request = _request + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "mtls_cert_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + request=request, + mtls_cert=old_mtls_cert, + ) + + @override + def _on_relation_changed_event(self, event: RelationChangedEvent): + if not self.charm.unit.is_leader(): + return + + repository = OpsRelationRepository( + self.model, event.relation, component=event.relation.app + ) + + # Don't do anything until we get some data + if not repository.get_data(): + return + + version = repository.get_field("version") or "v0" + if version == "v0": + request_model = build_model(repository, RequirerDataContractV0) + old_name = request_model.original_field + request_model.request_id = None # For safety, let's ensure that we don't have a model. + self._handle_event(event, repository, request_model) + logger.info( + f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" + ) + self.interface.repository( + event.relation.id, + ).write_field(old_name, request_model.resource) + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if self.bulk_event: + self._handle_bulk_event(event, repository, request_model) + return + for request in request_model.requests: + self._handle_event(event, repository, request) + + def set_response(self, relation_id: int, response: ResourceProviderModel): + r"""Sets a response in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. + If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! 
+ + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. + for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index] = response + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = "", + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + ) + + if 
requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" + ) + + # Created custom event names for each alias. + if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f"{relation_alias}_resource_created", + ResourceCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_resource_entity_created", + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). 
+ """ + if not model: + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + for request in model.requests: + if request.request_id == request_id: + return request.secret_user is not None or request.secret_entity is not None + return False + + def are_all_resources_created(self, rel_id: int) -> bool: + """Checks that all resources have been created for a relation. + + Args: + rel_id: The relation id to check. + """ + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + return all( + self.is_resource_created(rel_id, request.request_id, model) + for request in model.requests + if request.request_id + ) + + @staticmethod + def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: + # Actual checking method. + # No need to check for psycopg here, it's been checked before. + if not psycopg2: + return False + + try: + with psycopg2.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg2.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", + str(e), + ) + return False + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: Optional index to check the database (default: 0 - first relation). + """ + if not psycopg2: + return False + + # Can't check a non existing relation. 
+ if len(self.relations) <= relation_index: + return False + + relation = self.relations[relation_index] + model = self.interface.build_model(relation_id=relation.id, component=relation.app) + for request in model.requests: + if request.endpoints and request.username and request.password: + host = request.endpoints.split(":")[0] + username = request.username.get_secret_value() + password = request.password.get_secret_value() + + connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" + return self._is_pg_plugin_enabled(plugin, connection_string) + logger.info("No valid request to use to check for plugin.") + return False + + ############################################################################## + # Helpers for aliases + ############################################################################## + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation and relation.data[self.charm.unit].get("alias"): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). + available_aliases = self.relation_aliases[:] + for relation in self.charm.model.relations[self.relation_name]: + alias = relation.data[self.charm.unit].get("alias") + if alias: + logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) + available_aliases.remove(alias) + + # Set the alias in the unit relation databag of the specific relation. 
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({"alias": available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get("alias") + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = 
unit + break + + response_model = self.interface.build_model(relation.id) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "authentication_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. + for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version="v1", requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info("Still waiting for data.") + 
return + + data = repository.get_field("data") + if not data: + logger.info("Missing data to compute diffs") + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f"No request matching the response with response_id {response_id}" + ) + self._handle_event(event, repository, request, response) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace("-", "_")) + repository.register_secret(uri, secret_group, response.request_id) + + if "secret-user" in _diff.added and not request.entity_type: + logger.info(f"resource {response.resource} created at {datetime.now()}") + getattr(self.on, "resource_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_created", response) + return + + if "secret-entity" in _diff.added and request.entity_type: + logger.info(f"entity {response.entity_name} created at {datetime.now()}") + getattr(self.on, "resource_entity_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_entity_created", response) + return + + if "endpoints" in _diff.added or "endpoints" in _diff.changed: + logger.info(f"endpoints 
changed at {datetime.now()}") + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "endpoints_changed", response) + return + + if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: + logger.info(f"read-only-endpoints changed at {datetime.now()}") + getattr(self.on, "read_only_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "read_only_endpoints_changed", response) + return diff --git a/tests/v1/integration/opensearch-charm/charmcraft.yaml b/tests/v1/integration/opensearch-charm/charmcraft.yaml index b4317391..ad2c056e 100644 --- a/tests/v1/integration/opensearch-charm/charmcraft.yaml +++ b/tests/v1/integration/opensearch-charm/charmcraft.yaml @@ -2,12 +2,11 @@ # See LICENSE file for licensing details. type: charm -# Whenever "bases" is changed: -# - Update tests/integration/conftest.py::pytest_configure() -# - Update .github/workflow/ci.yaml integration-test matrix +base: ubuntu@22.04 platforms: - ubuntu@22.04:amd64: - ubuntu@24.04:amd64: + amd64: + build-on: [amd64] + build-for: [amd64] # Files implicitly created by charmcraft without a part: # - dispatch (https://github.com/canonical/charmcraft/pull/1898) @@ -59,14 +58,7 @@ parts: - libssl-dev # Needed to build Python dependencies with Rust from source - pkg-config # Needed to build Python dependencies with Rust from source override-build: | - # Workaround for https://github.com/canonical/charmcraft/issues/2068 - # rustup used to install rustc and cargo, which are needed to build Python dependencies with Rust from source - if [[ "$CRAFT_PLATFORM" == ubuntu@20.04:* || "$CRAFT_PLATFORM" == ubuntu@22.04:* ]] - then - snap install rustup --classic - else - apt-get install rustup -y - fi + snap install rustup --classic # If Ubuntu version < 24.04, rustup was installed from snap instead of from the Ubuntu # 
archive—which means the rustup version could be updated at any time. Print rustup version diff --git a/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py new file mode 100644 index 00000000..e22388d1 --- /dev/null +++ b/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -0,0 +1,2753 @@ +# Copyright 2025 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. + +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. 
The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. + + +#### Requirer Charm + +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +```python +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ClientCharm(CharmBase): + # Database charm that accepts connections from application charms. + def __init__(self, *args) -> None: + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + RequirerCommonModel( + entity_type="USER", + ) + ] + self.database = ResourceRequirerEventHandler( + self,"database", requests, response_model=ResourceProviderModel + ) + self.framework.observe(self.database.on.resource_created, self._on_resource_created) + self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) + + def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new database is created. + relation_id = event.relation.id + response = event.response # This is the response model + + username = event.response.username + password = event.response.password + ... + + def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new entity is created. + ... 
+
+Compared to V0, this library makes heavy use of pydantic models, and allows for
+multiple requests, specified as a list.
+On the Requirer side, each response will trigger one custom event for that response.
+This way, it allows for more strategic events to be emitted according to the request.
+
+As shown above, the library provides some custom events to handle specific situations, which are listed below:
+- resource_created: event emitted when the requested database is created.
+- resource_entity_created: event emitted when the requested entity is created.
+- endpoints_changed: event emitted when the read/write endpoints of the database have changed.
+- read_only_endpoints_changed: event emitted when the read-only endpoints of the database
+  have changed. Event is not triggered if read/write endpoints changed too.
+
+If it is needed to connect multiple database clusters to the same relation endpoint,
+the application charm can implement the same code as if it would connect to only
+one database cluster (like the above code example).
+
+To differentiate multiple clusters connected to the same relation endpoint
+the application charm can use the name of the remote application:
+
+```python
+
+def _on_resource_created(self, event: ResourceCreatedEvent) -> None:
+    # Get the remote app name of the cluster that triggered this event
+    cluster = event.relation.app.name
+```
+
+It is also possible to provide an alias for each different database cluster/relation.
+
+So, it is possible to differentiate the clusters in two ways.
+The first is to use the remote application name, i.e., `event.relation.app.name`, as above.
+
+The second way is to use different event handlers to handle each cluster's events.
+The implementation would be something like the following code:
+
+```python
+
+from charms.data_platform_libs.v1.data_interfaces import (
+    RequirerCommonModel,
+    RequirerDataContractV1,
+    ResourceCreatedEvent,
+    ResourceEntityCreatedEvent,
+    ResourceProviderModel,
+    ResourceRequirerEventHandler,
+)
+
+class ApplicationCharm(CharmBase):
+    # Application charm that connects to database charms.
+
+    def __init__(self, *args):
+        super().__init__(*args)
+
+        requests = [
+            RequirerCommonModel(
+                resource="clientdb",
+            ),
+            RequirerCommonModel(
+                resource="clientbis",
+            ),
+        ]
+        # Define the cluster aliases and one handler for each cluster database created event.
+        self.database = ResourceRequirerEventHandler(
+            self,
+            relation_name="database",
+            relations_aliases = ["cluster1", "cluster2"],
+            requests=requests,
+        )
+        self.framework.observe(
+            self.database.on.cluster1_resource_created, self._on_cluster1_resource_created
+        )
+        self.framework.observe(
+            self.database.on.cluster2_resource_created, self._on_cluster2_resource_created
+        )
+
+    def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster1
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+
+    def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None:
+        # Handle the created database on the cluster named cluster2
+
+        # Create configuration file for app
+        config_file = self._render_app_config_file(
+            event.response.username,
+            event.response.password,
+            event.response.endpoints,
+        )
+        ...
+``` + +### Provider Charm + +Following an example of using the ResourceRequestedEvent, in the context of the +database charm code: + +```python +from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + # Charm events defined in the database provides charm library. + self.provided_database = DatabaseProvides(self, relation_name="database") + self.framework.observe(self.provided_database.on.database_requested, + self._on_database_requested) + # Database generic helper + self.database = DatabaseHelper() + + def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: + # Handle the event triggered by a new database requested in the relation + # Retrieve the database name using the charm library. + db_name = event.database + # generate a new user credential + username = self.database.generate_user() + password = self.database.generate_password() + # set the credentials for the relation + self.provided_database.set_credentials(event.relation.id, username, password) + # set other variables for the relation event.set_tls("False") +``` + +As shown above, the library provides a custom event (database_requested) to handle +the situation when an application charm requests a new database to be created. +It's preferred to subscribe to this event instead of relation changed event to avoid +creating a new database when other information other than a database name is +exchanged in the relation databag. 
+ +""" + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections import namedtuple +from datetime import datetime +from enum import Enum +from typing import ( + Annotated, + Any, + ClassVar, + Generic, + Literal, + NewType, + TypeAlias, + TypeVar, + final, + overload, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SecretStr, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) +from pydantic.types import _SecretBase, _SecretField +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeAliasType, override + +try: + import psycopg2 +except ImportError: + psycopg2 = None + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 0 + +PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + +RESOURCE_ALIASES = [ + "database", + "subject", + "topic", + "index", + "plugin-url", +] + +SECRET_PREFIX = "secret-" + + 
##############################################################################
# Exceptions
##############################################################################


class DataInterfacesError(Exception):
    """Common ancestor for DataInterfaces related exceptions."""


class SecretError(DataInterfacesError):
    """Common ancestor for Secrets related exceptions."""


class SecretAlreadyExistsError(SecretError):
    """A secret that was to be added already exists."""


class SecretsUnavailableError(SecretError):
    """Secrets aren't yet available for Juju version used."""


class IllegalOperationError(DataInterfacesError):
    """To be used when an operation is not allowed to be performed."""


##############################################################################
# Global helpers / utilities
##############################################################################


def gen_salt() -> str:
    """Generate a random 16-character alphanumeric salt.

    NOTE: uses `random`, which is acceptable here since the salt only needs to
    make hashes unique (request identification), not be cryptographically secure.
    """
    return "".join(random.choices(string.ascii_letters + string.digits, k=16))


def gen_hash(resource_name: str, salt: str) -> str:
    """Generate a consistent 16-character hash from the resource name and salt.

    Used as a stable request identifier when no explicit request_id is provided.
    """
    hasher = hashlib.sha256()
    hasher.update(f"{resource_name}:{salt}".encode())
    return hasher.hexdigest()[:16]


def ensure_leader_for_app(f):
    """Decorator to ensure that only leader can perform given operation.

    If the wrapped method targets app data while the local unit is not the
    leader, the call is logged as an error and skipped (returns None).
    """

    def wrapper(self, *args, **kwargs):
        if self.component == self._local_app and not self._local_unit.is_leader():
            logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit")
            return
        return f(self, *args, **kwargs)

    # Marker attribute so other code can detect leader-restricted operations.
    wrapper.leader_only = True
    return wrapper


def get_encoded_dict(
    relation: Relation, member: Unit | Application, field: str
) -> dict[str, Any] | None:
    """Retrieve and JSON-decode an encoded field from relation data.

    Returns None (after logging an error) when the decoded value is not a dict.
    """
    data = json.loads(relation.data[member].get(field, "{}"))
    if isinstance(data, dict):
        return data
    logger.error("Unexpected datatype for %s instead of dict.", str(data))


Diff = namedtuple("Diff", ["added", "changed", "deleted"])
Diff.__doc__ = """
A tuple for storing the diff between two data mappings.

added - keys that were added
changed - keys that still exist but have new values
deleted - keys that were deleted"""


def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff:
    """Retrieves the diff of the data in the relation changed databag for v1.

    Args:
        old_data: dictionary of the stored data before the event.
        new_data: dictionary of the received data to compute the diff.

    Returns:
        a Diff instance containing the added, deleted and changed
        keys from the event relation databag.
    """
    old_data = old_data or {}

    # Keys present only in the new databag.
    added = new_data.keys() - old_data.keys()
    # Keys present only in the old databag.
    deleted = old_data.keys() - new_data.keys()
    # Keys present in both databags whose values differ.
    changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]}
    return Diff(added, changed, deleted)


def resource_added(diff: Diff) -> bool:
    """Ensures that one of the aliased resources has been added.

    NOTE: the parameter name shadows the module-level `diff` function; it is
    kept unchanged for backward compatibility with keyword callers.
    """
    return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"])


def store_new_data(
    relation: Relation,
    component: Unit | Application,
    new_data: dict[str, str],
    short_uuid: str | None = None,
):
    """Stores the new data in the databag for diff computation.

    For V0 (no short_uuid) the data is stored flat under the "data" key.
    For V1 the "data" key holds a mapping from ShortUUID to request data.
    """
    # First, the case for V0
    if not short_uuid:
        relation.data[component].update({"data": json.dumps(new_data)})
    # Then the case for V1, where we have a ShortUUID
    else:
        data = json.loads(relation.data[component].get("data", "{}"))
        if not isinstance(data, dict):
            raise ValueError(f"Expected a dict in the 'data' field, got {type(data).__name__}")
        newest_data = copy.deepcopy(data)
        newest_data[short_uuid] = new_data
        relation.data[component].update({"data": json.dumps(newest_data)})


##############################################################################
# Helper classes
##############################################################################

SecretGroup = NewType("SecretGroup", str)


# A string constrained to the Juju secret URI pattern ("secret:...").
SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")])


class SecretBool(_SecretField[bool]):
    """Class for booleans as secrets."""

    _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema()
    _error_kind: ClassVar[str] = "bool_type"

    def _display(self) -> str:
        # Never reveal the boolean value in repr/str.
        return "****"


OptionalSecretStr: TypeAlias = SecretStr | None
OptionalSecretBool: TypeAlias = SecretBool | None

# The set of annotations that mark a pydantic field as secret-backed.
OptionalSecrets = (OptionalSecretStr, OptionalSecretBool)

# Annotated secret fields: the trailing string metadata is the secret group
# name. These fields are excluded from databag serialisation; their values are
# stored in Juju secrets instead.
UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"]
TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"]
TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"]
MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"]
ExtraSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "extra"]
EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"]


class Scope(Enum):
    """Peer relations scope."""

    APP = "app"
    UNIT = "unit"


class CachedSecret:
    """Locally cache a secret.

    The data structure is precisely reusing/simulating the actual Juju Secret
    storage: metadata and content are fetched lazily and memoized.
    """

    # Juju error messages that indicate a benign refresh/ownership condition.
    KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]]

    def __init__(
        self,
        model: Model,
        component: Application | Unit,
        label: str,
        secret_uri: str | None = None,
    ):
        self._secret_meta = None
        self._secret_content = {}
        self._secret_uri = secret_uri
        self.label = label
        self._model = model
        self.component = component
        self.current_label = None

    @property
    def meta(self) -> Secret | None:
        """Getting cached secret meta-information.

        Resolution order: by label first, then by URI (re-labelling with the
        proposed label). Returns None when neither is available.
        """
        if not self._secret_meta:
            if not (self._secret_uri or self.label):
                return

            try:
                self._secret_meta = self._model.get_secret(label=self.label)
            except SecretNotFoundError:
                # Falling back to seeking for potential legacy labels
                logger.info(f"Secret with label {self.label} not found")

            # If still not found, to be checked by URI, to be labelled with the proposed label
            if not self._secret_meta and self._secret_uri:
                self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label)
        return self._secret_meta

    ##########################################################################
    # Public functions
    ##########################################################################

    def add_secret(
        self,
        content: dict[str, str],
        relation: Relation | None = None,
        label: str | None = None,
    ) -> Secret:
        """Create a new secret.

        Raises:
            SecretAlreadyExistsError: if this object already refers to a secret URI.
        """
        if self._secret_uri:
            raise SecretAlreadyExistsError(
                f"Secret is already defined with uri {self._secret_uri}"
            )

        label = self.label if not label else label

        secret = self.component.add_secret(content, label=label)
        if relation and relation.app != self._model.app:
            # If it's not a peer relation, grant is to be applied
            secret.grant(relation)
        self._secret_uri = secret.id
        self._secret_meta = secret
        return self._secret_meta

    def get_content(self) -> dict[str, str]:
        """Getting cached secret content."""
        if not self._secret_content:
            if self.meta:
                try:
                    self._secret_content = self.meta.get_content(refresh=True)
                except (ValueError, ModelError) as err:
                    # https://bugs.launchpad.net/juju/+bug/2042596
                    # Only triggered when 'refresh' is set
                    if isinstance(err, ModelError) and not any(
                        msg in str(err) for msg in self.KNOWN_MODEL_ERRORS
                    ):
                        raise
                    # Due to: ValueError: Secret owner cannot use refresh=True
                    self._secret_content = self.meta.get_content()
        return self._secret_content

    def set_content(self, content: dict[str, str]) -> None:
        """Setting cached secret content.

        An empty content removes all revisions of the secret.
        """
        if not self.meta:
            return

        # Avoid needless secret revisions when nothing changed.
        if content == self.get_content():
            return

        if content:
            self.meta.set_content(content)
            self._secret_content = content
        else:
            self.meta.remove_all_revisions()

    def get_info(self) -> SecretInfo | None:
        """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any."""
        if self.meta:
            return self.meta.get_info()

    def remove(self) -> None:
        """Remove secret.

        Raises:
            SecretsUnavailableError: if no underlying secret exists.
        """
        if not self.meta:
            raise SecretsUnavailableError("Non-existent secret was attempted to be removed.")
        try:
            self.meta.remove_all_revisions()
        except SecretNotFoundError:
            pass
        self._secret_content = {}
        self._secret_meta = None
        self._secret_uri = None


class SecretCache:
    """A data structure storing CachedSecret objects."""

    def __init__(self, model: Model, component: Application | Unit):
        self._model = model
        self.component = component
        self._secrets: dict[str, CachedSecret] = {}

    def get(self, label: str, uri: str | None = None) -> CachedSecret | None:
        """Getting a secret from Juju Secret store or cache.

        Only secrets that actually exist in Juju (meta resolves) are cached.
        """
        if not self._secrets.get(label):
            secret = CachedSecret(self._model, self.component, label, uri)
            if secret.meta:
                self._secrets[label] = secret
        return self._secrets.get(label)

    def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret:
        """Adding a secret to Juju Secret.

        Raises:
            SecretAlreadyExistsError: if a secret with this label is cached already.
        """
        if self._secrets.get(label):
            raise SecretAlreadyExistsError(f"Secret {label} already exists")

        secret = CachedSecret(self._model, self.component, label)
        secret.add_secret(content, relation)
        self._secrets[label] = secret
        return self._secrets[label]

    def remove(self, label: str) -> None:
        """Remove a secret from the cache."""
        if secret := self.get(label):
            try:
                secret.remove()
                self._secrets.pop(label)
            except (SecretsUnavailableError, KeyError):
                pass
            else:
                return
        # Use the module logger (was: root `logging`) for consistency.
        logger.debug("Non-existing Juju Secret was attempted to be removed %s", label)


##############################################################################
# Models classes
##############################################################################


class PeerModel(BaseModel):
    """Common Model for all peer relations.

    Secret-backed fields (annotated with a secret group) are transparently
    loaded from / written to Juju secrets when a repository is passed in the
    (de)serialization context.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        # Without a repository in the validation context we cannot resolve secrets.
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        for field, field_info in self.__pydantic_fields__.items():
            # Secret-backed fields carry exactly one metadata item: the group name.
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                if not secret:
                    logger.info(f"No secret for group {secret_group}")
                    continue

                value = secret.get_content().get(aliased_field)

                # Booleans are JSON-encoded inside the secret content.
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)
                setattr(self, field, value)

        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        # Without a repository in the serialization context, fall back to plain dump.
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                value = getattr(self, field)

                # Unwrap pydantic Secret* wrappers; JSON-encode non-string values.
                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    # No secret yet for this group: create one only if there is a value.
                    if value:
                        secret = repository.add_secret(
                            aliased_field,
                            actual_value,
                            secret_group,
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                # None deletes the entry; anything else upserts it.
                if value is None:
                    full_content.pop(aliased_field, None)
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)
        return handler(self)
+class CommonModel(BaseModel): + """Common Model for both requirer and provider. + + request_id stores the request identifier for easier access. + resource is the requested resource. + """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + extra="allow", + ) + + resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") + request_id: str | None = Field(default=None) + salt: str = Field( + description="This salt is used to create unique hashes even when other fields map 1-1", + default_factory=gen_salt, + ) + + @model_validator(mode="after") + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + + if not secret_uri: + continue + + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + if not secret: + logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") + continue + + value = secret.get_content().get(aliased_field) + if value and field_info.annotation == OptionalSecretBool: + value = SecretBool(json.loads(value)) + elif value: + 
value = SecretStr(value) + + setattr(self, field, value) + return self + + @model_serializer(mode="wrap") + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + _encountered_secrets: set[tuple[CachedSecret, str]] = set() + if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get("repository") + short_uuid = self.request_id or gen_hash(self.resource, self.salt) + # Backward compatibility for v0 regarding secrets. + if info.context.get("version") == "v0": + short_uuid = None + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + "-", "_" + ) + secret_uri: str | None = getattr(self, secret_field, None) + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + value = getattr(self, field) + + actual_value = ( + value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value + ) + if not isinstance(actual_value, str): + actual_value = json.dumps(actual_value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, actual_value, secret_group, short_uuid + ) + if not secret or not secret.meta: + raise SecretError("No secret to send back") + setattr(self, secret_field, secret.meta.id) + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + 
_encountered_secrets.add((secret, secret_field)) + else: + full_content.update({aliased_field: actual_value}) + secret.set_content(full_content) + + # Delete all empty secrets and clean up their fields. + for secret, secret_field in _encountered_secrets: + if not secret.get_content(): + # Setting a field to '' deletes it + setattr(self, secret_field, "") + repository.delete_secret(secret.label) + + return handler(self) + + @classmethod + def _get_secret_field(cls, field: str) -> SecretGroup | None: + """Checks if the field is a secret uri or not.""" + if not field.startswith(SECRET_PREFIX): + return None + + value = field.split("-")[1] + if info := cls.__pydantic_fields__.get(field.replace("-", "_")): + if info.annotation == SecretString: + return SecretGroup(value) + return None + + +class EntityPermissionModel(BaseModel): + """Entity Permissions Model.""" + + resource_name: str + resource_type: str + privileges: list + + +class RequirerCommonModel(CommonModel): + """Requirer side of the request model. + + extra_user_roles is used to request more roles for that user. + external_node_connectivity is used to indicate that the URI should be made for external clients when True + """ + + extra_user_roles: str | None = Field(default=None) + extra_group_roles: str | None = Field(default=None) + external_node_connectivity: bool = Field(default=False) + entity_type: Literal["USER", "GROUP"] | None = Field(default=None) + entity_permissions: list[EntityPermissionModel] | None = Field(default=None) + secret_mtls: SecretString | None = Field(default=None) + mtls_cert: MtlsSecretStr = Field(default=None) + + @model_validator(mode="after") + def validate_fields(self): + """Validates that no inconsistent request is being sent.""" + if self.entity_type and self.entity_type not in ["USER", "GROUP"]: + raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") + + if self.entity_type == "USER" and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == "GROUP" and self.extra_user_roles: + raise ValueError("Inconsistent entity information. Use extra_group_roles instead") + + return self + + +class ProviderCommonModel(CommonModel): + """Serialized fields added to the databag. + + endpoints stores the endpoints exposed to that client. + secret_user is a secret URI mapping to the user credentials + secret_tls is a secret URI mapping to the TLS certificate + secret_extra is a secret URI for all additional secrets requested. + """ + + endpoints: str | None = Field(default=None) + read_only_endpoints: str | None = Field(default=None) + secret_user: SecretString | None = Field(default=None) + secret_tls: SecretString | None = Field(default=None) + secret_extra: SecretString | None = Field(default=None) + secret_entity: SecretString | None = Field(default=None) + + +class ResourceProviderModel(ProviderCommonModel): + """Extended model including the deserialized fields.""" + + username: UserSecretStr = Field(default=None) + password: UserSecretStr = Field(default=None) + uris: UserSecretStr = Field(default=None) + read_only_uris: UserSecretStr = Field(default=None) + tls: TlsSecretBool = Field(default=None) + tls_ca: TlsSecretStr = Field(default=None) + entity_name: EntitySecretStr = Field(default=None) + entity_password: EntitySecretStr = Field(default=None) + version: str | None = Field(default=None) + + +class RequirerDataContractV0(RequirerCommonModel): + """Backward compatibility.""" + + version: Literal["v0"] = Field(default="v0") + + original_field: str = Field(exclude=True, default="") + + @model_validator(mode="before") + @classmethod + def ensure_original_field(cls, data: Any): + """Ensures that we keep the original field.""" + if isinstance(data, dict): + for alias in RESOURCE_ALIASES: + if data.get(alias) 
is not None: + data["original_field"] = alias + break + else: + for alias in RESOURCE_ALIASES: + if getattr(data, alias) is not None: + data.original_field = alias + return data + + +TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) +TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) + + +class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): + """The new Data Contract.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TRequirerCommonModel] + + +def discriminate_on_version(payload: Any) -> str: + """Use the version to discriminate.""" + if isinstance(payload, dict): + return payload.get("version", "v0") + return getattr(payload, "version", "v0") + + +RequirerDataContractType = Annotated[ + Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], + Discriminator(discriminate_on_version), +] + + +RequirerDataContract = TypeAdapter(RequirerDataContractType) + + +class DataContractV0(ResourceProviderModel): + """The Data contract of the response, for V0.""" + + +class DataContractV1(BaseModel, Generic[TResourceProviderModel]): + """The Data contract of the response, for V1.""" + + version: Literal["v1"] = Field(default="v1") + requests: list[TResourceProviderModel] = Field(default_factory=list) + + +DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) + + +TCommonModel = TypeVar("TCommonModel", bound=CommonModel) + + +def is_topic_value_acceptable(value: str | None) -> str | None: + """Check whether the given Kafka topic value is acceptable.""" + if value and "*" in value[:3]: + raise ValueError(f"Error on topic '{value}',, unacceptable value.") + return value + + +class KafkaRequestModel(RequirerCommonModel): + """Specialised model for Kafka.""" + + consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( + Field(default=None) + ) + + +class KafkaResponseModel(ResourceProviderModel): 
+ """Kafka response model.""" + + consumer_group_prefix: ExtraSecretStr = Field(default=None) + zookeeper_uris: ExtraSecretStr = Field(default=None) + + +############################################################################## +# AbstractRepository class +############################################################################## + + +class AbstractRepository(ABC): + """Abstract repository interface.""" + + @abstractmethod + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + ... + + @abstractmethod + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def get_field(self, field: str) -> str | None: + """Gets the value for one field.""" + ... + + @abstractmethod + def get_fields(self, *fields: str) -> dict[str, str | None]: + """Gets the values for all provided fields.""" + ... + + @abstractmethod + def write_field(self, field: str, value: Any) -> None: + """Writes the value in the field, without any secret support.""" + ... + + @abstractmethod + def write_fields(self, mapping: dict[str, Any]) -> None: + """Writes the values of mapping in the fields without any secret support (keys of mapping).""" + ... + + def write_secret_field( + self, field: str, value: Any, group: SecretGroup + ) -> CachedSecret | None: + """Writes a secret field.""" + ... + + @abstractmethod + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def delete_secret(self, label: str): + """Deletes a secret by its label.""" + ... + + @abstractmethod + def delete_field(self, field: str) -> None: + """Deletes a field.""" + ... 
+ + @abstractmethod + def delete_fields(self, *fields: str) -> None: + """Deletes all the provided fields.""" + ... + + @abstractmethod + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + """Delete a field stored in a secret group.""" + ... + + @abstractmethod + def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: + """Registers a secret using the repository.""" + ... + + @abstractmethod + def get_data(self) -> dict[str, Any] | None: + """Gets the whole data.""" + ... + + @abstractmethod + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Builds a secret field.""" + + +class OpsRepository(AbstractRepository): + """Implementation for ops repositories, with some methods left out.""" + + SECRET_FIELD_NAME: str + + IGNORES_GROUPS: list[SecretGroup] = [] + + uri_to_databag: bool = True + + def __init__( + self, + model: Model, + relation: Relation | None, + component: Unit | Application, + ): + self._local_app = model.app + self._local_unit = model.unit + self.relation = relation + self.component = component + self.model = model + self.secrets = SecretCache(model, component) + + @abstractmethod + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group mapping for secrets within a relation context.""" + ... 
+ + @override + def get_data(self) -> dict[str, Any] | None: + ret: dict[str, Any] = {} + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + for key, value in self.relation.data[self.component].items(): + try: + ret[key] = json.loads(value) + except json.JSONDecodeError: + ret[key] = value + + return ret + + @override + @ensure_leader_for_app + def get_field( + self, + field: str, + ) -> str | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + return relation_data.get(field) + + @override + @ensure_leader_for_app + def get_fields(self, *fields: str) -> dict[str, str]: + res = {} + for field in fields: + if (value := self.get_field(field)) is not None: + res[field] = value + return res + + @override + @ensure_leader_for_app + def write_field(self, field: str, value: Any) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if not value: + return None + self.relation.data[self.component].update({field: value}) + + @override + @ensure_leader_for_app + def write_fields(self, mapping: dict[str, Any]) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + (self.write_field(field, value) for field, value in mapping.items()) + + @override + @ensure_leader_for_app + def write_secret_field( + self, field: str, value: Any, 
secret_group: SecretGroup + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = self.get_field(self.secret_field(secret_group, field)) + + secret = self.secrets.get(label=label, uri=secret_uri) + if not secret: + return self.add_secret(field, value, secret_group) + else: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({field: value}) + secret.set_content(full_content) + return secret + + @override + @ensure_leader_for_app + def delete_field(self, field: str) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + relation_data = self.relation.data[self.component] + try: + relation_data.pop(field) + except KeyError: + logger.debug( + f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" + ) + + @override + @ensure_leader_for_app + def delete_fields(self, *fields: str) -> None: + (self.delete_field(field) for field in fields) + + @override + @ensure_leader_for_app + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + relation_data = self.relation.data[self.component] + secret_field = self.secret_field(secret_group, field) + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = relation_data.get(secret_field) + + secret = 
self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f"from relation {self.relation.id} and group {secret_group}" + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. 
+ """ + if not self.relation: + raise ValueError("Cannot register without relation.") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> Any | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info("No relation to get value from") + return None + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + @override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info("No relation to get value from") + return None + + if self.component not in self.relation.data: + logger.info(f"Component {self.component} not in relation {self.relation}") + return None + + if secret_group in self.IGNORES_GROUPS: + logger.warning(f"Trying to get invalid secret group {secret_group}") + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + return secret + + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + + +@final +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = "secret" + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" + return f"{relation.name}.{relation.id}.{secret_group}.secret" + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f"{self.SECRET_FIELD_NAME}-{secret_group}" + + +class 
OpsPeerRepository(OpsRepository):
    """Implementation of the Ops Repository for peer relations."""

    SECRET_FIELD_NAME = "internal_secret"

    # Client-facing groups are not valid in a peer context.
    IGNORES_GROUPS = [
        SecretGroup("user"),
        SecretGroup("entity"),
        SecretGroup("mtls"),
        SecretGroup("tls"),
    ]

    # Peer secrets are looked up by label; no URI is stored in the databag.
    uri_to_databag: bool = False

    @property
    def scope(self) -> Scope:
        """Returns the scope (APP or UNIT) matching the wrapped component."""
        if isinstance(self.component, Application):
            return Scope.APP
        if isinstance(self.component, Unit):
            return Scope.UNIT
        raise ValueError("Invalid component, neither a Unit nor an Application")

    @override
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group_mappings for secrets within a relation context.

        The "extra" group is the default and is omitted from the label.
        """
        members = [relation.name, self._local_app.name, self.scope.value]

        if secret_group != SecretGroup("extra"):
            members.append(secret_group)
        return f"{'.'.join(members)}"

    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Generates the per-field name used in the peer relation (field@group).

        Raises:
            ValueError: if no field is given (peer secrets are stored per field).
        """
        if not field:
            raise ValueError("Must have a field.")
        return f"{field}@{secret_group}"


@final
class OpsPeerUnitRepository(OpsPeerRepository):
    """Implementation for the local unit."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        super().__init__(model, relation, component)


@final
class OpsOtherPeerUnitRepository(OpsPeerRepository):
    """Implementation for a remote unit (read-only: all writes raise)."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        # Guard: the local unit must use OpsPeerUnitRepository instead.
        if component == model.unit:
            raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.")
        super().__init__(model, relation, component)

    @override
    def write_field(self, field: str, value: Any) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def write_fields(self, mapping: dict[str, Any]) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def add_secret(
        self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> CachedSecret | None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_field(self, field: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_fields(self, *fields: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")


TRepository = TypeVar("TRepository", bound=OpsRepository)
TCommon = TypeVar("TCommon", bound=BaseModel)
TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel)
TCommonBis = TypeVar("TCommonBis", bound=BaseModel)


class RepositoryInterface(Generic[TRepository, TCommon]):
    """Repository builder.

    Binds a relation name to a repository type and an optional default model,
    and builds per-relation repositories and validated models on demand.
    """

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        component: Unit | Application,
        repository_type: type[TRepository],
        model: type[TCommon] | TypeAdapter | None,
    ):
        self.charm = charm
        self._model = charm.model
        self.repository_type = repository_type
        self.relation_name = relation_name
        self.model = model
        self.component = component

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this relation name."""
        return self._model.relations[self.relation_name]

    def repository(
        self, relation_id: int, component: Unit | Application | None = None
    ) -> TRepository:
        """Returns a repository for the relation.

        Raises:
            ValueError: if the relation does not exist.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return self.repository_type(self._model, relation, component or self.component)

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommon],
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: TypeAdapter[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: None = None,
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    def build_model(
        self,
        relation_id: int,
        model: type[TCommon] | TypeAdapter[TCommonBis] | None = None,
        component: Unit | Application | None = None,
    ) -> TCommon | TCommonBis:
        """Builds a model using the repository for that relation.

        Raises:
            ValueError: if no model is available or the relation is missing.
        """
        model = model or self.model  # First the provided model (allows for specialisation)
        component = component or self.component
        if not model:
            raise ValueError("Missing model to specialise data")
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return build_model(self.repository_type(self._model, relation, component), model)

    def write_model(
        self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None
    ):
        """Writes the model using the repository.

        Raises:
            ValueError: if the relation is missing.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")

        write_model(
            self.repository_type(self._model, relation, self.component), model, context=context
        )


class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]):
    """Specialised Interface to build repositories for regular (non-peer) relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model)


class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]):
    """Specialised Interface to build repositories for app peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model)


class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]):
    """Specialised Interface to build repositories for this unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model)


class OpsOtherPeerUnitRepositoryInterface(
    RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon]
):
    """Specialised Interface to build repositories for another unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        unit: Unit,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model)


##############################################################################
# DDD implementation methods
##############################################################################
##############################################################################


def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon:
    """Builds a common model using the provided repository and provided model structure."""
    data = repository.get_data() or {}

    # "data" is internal diff-bookkeeping, never part of the model.
    data.pop("data", None)

    # Beware this means all fields should have a default value here.
    if isinstance(model, TypeAdapter):
        return model.validate_python(data, context={"repository": repository})

    return model.model_validate(data, context={"repository": repository})


def write_model(
    repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None
):
    """Writes the data stored in the model using the repository object."""
    context = context or {}
    dumped = model.model_dump(
        mode="json", context={"repository": repository} | context, exclude_none=False
    )
    for field, value in dumped.items():
        # None means "remove the field"; everything else is written,
        # JSON-encoding non-string values.
        if value is None:
            repository.delete_field(field)
            continue
        dumped_value = value if isinstance(value, str) else json.dumps(value)
        repository.write_field(field, dumped_value)


##############################################################################
# Custom Events
##############################################################################


class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]):
    """Resource requested event.

    Contains the request that should be handled.

    fields to serialize: relation, app, unit, request
    """

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        request: TRequirerCommonModel,
    ):
        super().__init__(handle)
        self.relation = relation
        self.app = app
        self.unit = unit
        self.request = request

    def snapshot(self) -> dict[str, Any]:
        """Save the event information."""
        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
        if self.app:
            snapshot["app_name"] = self.app.name
        if self.unit:
            snapshot["unit_name"] = self.unit.name
        # The models are too complex and would be blocked by marshal so we pickle dump the model.
        # The full dictionary is pickled afterwards anyway.
+ snapshot["request"] = pickle.dumps(self.request) + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.request = pickle.loads(snapshot["request"]) + + +class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource requested event.""" + + pass + + +class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource Entity requested event.""" + + pass + + +class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + pass + + +class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + old_mtls_cert: str | None = None, + ): + super().__init__(handle, relation, app, unit, request) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. 
+ + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + requests: list[TRequirerCommonModel], + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.requests = requests + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. + snapshot["requests"] = [pickle.dumps(request) for request in self.requests] + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.requests = [pickle.loads(request) for request in snapshot["requests"]] + + +class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} + if self.app: + snapshot["app_name"] = self.app.name + if self.unit: + snapshot["unit_name"] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot["response"] = pickle.dumps(self.response) + return snapshot + + def restore(self, snapshot: dict): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot["relation_name"], snapshot["relation_id"] + ) + if not relation: + raise ValueError("Missing relation") + self.relation = relation + self.app = None + app_name = snapshot.get("app_name") + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get("unit_name") + if unit_name: + self.app = self.framework.model.get_app(unit_name) + + self.response = pickle.loads(snapshot["response"]) + + +class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource has been created.""" + + pass + + +class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource entity has been created.""" + + pass + + +class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read/Write enpoints are changed.""" + + pass + + +class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read-only enpoints are changed.""" + + pass + + +class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Authentication was updated for a user.""" + + pass + + +class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _handle_event( + self, + ): + """Handles 
the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, "data") + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode="json", + exclude={"data"}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data(relation, self.component, new_data, short_uuid=request.request_id) + + return _diff + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = "", + mtls_enabled: bool = False, + bulk_event: bool = False, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. + bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+        """
+        if not isinstance(event, RelationChangedEvent):
+            return
+
+        for key in ["entity-type", "extra-user-roles", "extra-group-roles"]:
+            if key in _diff.changed:
+                raise ValueError(f"Cannot change {key} after relation has already been created")
+
+    def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel):
+        # Dispatch at most ONE custom event per diff, checked in priority order:
+        # mtls cert update, then resource/entity request, then permission change.
+        if self.mtls_enabled and "secret-mtls" in _diff.added:
+            getattr(self.on, "mtls_cert_updated").emit(
+                event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None
+            )
+            return
+        # Emit a resource requested event if the setup key (resource name)
+        # was added to the relation databag, but the entity-type key was not.
+        if resource_added(_diff) and "entity-type" not in _diff.added:
+            getattr(self.on, "resource_requested").emit(
+                event.relation,
+                app=event.app,
+                unit=event.unit,
+                request=request,
+            )
+            # To avoid unnecessary application restarts do not trigger other events.
+            return
+
+        # Emit an entity requested event if the setup key (resource name)
+        # was added to the relation databag, in addition to the entity-type key.
+        if resource_added(_diff) and "entity-type" in _diff.added:
+            getattr(self.on, "resource_entity_requested").emit(
+                event.relation,
+                app=event.app,
+                unit=event.unit,
+                request=request,
+            )
+            # To avoid unnecessary application restarts do not trigger other events.
+            return
+
+        # Emit a permissions changed event if the setup key (resource name)
+        # was NOT newly added (i.e. the resource was already requested) and the
+        # entity-permissions key was added or changed in this diff.
+        if (
+            not resource_added(_diff)
+            and "entity-type" not in _diff.added
+            and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed)
+        ):
+            getattr(self.on, "resource_entity_permissions_changed").emit(
+                event.relation, app=event.app, unit=event.unit, request=request
+            )
+            # To avoid unnecessary application restarts do not trigger other events. 
+ return + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request: RequirerCommonModel, + ): + _diff = self.compute_diff(event.relation, request, repository) + + self._validate_diff(event, _diff) + self._dispatch_events(event, _diff, request) + + def _handle_bulk_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request_model: RequirerDataContractV1[TRequirerCommonModel], + ): + """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. + + This allows for the developer to process the diff and store it themselves + """ + for request in request_model.requests: + # Compute the diff without storing it so we can validate the diffs. + _diff = self.compute_diff(event.relation, request, repository, store=False) + self._validate_diff(event, _diff) + + getattr(self.on, "bulk_resources_requested").emit( + event.relation, app=event.app, unit=event.unit, requests=request_model.requests + ) + + # Store all the diffs if they were not already stored. 
+ for request in request_model.requests: + new_data = request.model_dump( + mode="json", + exclude={"data"}, + context={"repository": repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info("MTLS is disabled, exiting early.") + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + logger.info("mtls-cert-updated") + + # V0, just fire the event. + if version == "v0": + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. 
+ else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if not short_uuid: + return + for _request in request_model.requests: + if _request.request_id == short_uuid: + request = _request + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "mtls_cert_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + request=request, + mtls_cert=old_mtls_cert, + ) + + @override + def _on_relation_changed_event(self, event: RelationChangedEvent): + if not self.charm.unit.is_leader(): + return + + repository = OpsRelationRepository( + self.model, event.relation, component=event.relation.app + ) + + # Don't do anything until we get some data + if not repository.get_data(): + return + + version = repository.get_field("version") or "v0" + if version == "v0": + request_model = build_model(repository, RequirerDataContractV0) + old_name = request_model.original_field + request_model.request_id = None # For safety, let's ensure that we don't have a model. + self._handle_event(event, repository, request_model) + logger.info( + f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" + ) + self.interface.repository( + event.relation.id, + ).write_field(old_name, request_model.resource) + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if self.bulk_event: + self._handle_bulk_event(event, repository, request_model) + return + for request in request_model.requests: + self._handle_event(event, repository, request) + + def set_response(self, relation_id: int, response: ResourceProviderModel): + r"""Sets a response in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. + If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! 
+ + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. + for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index] = response + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = "", + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + ) + + if 
requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" + ) + + # Created custom event names for each alias. + if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f"{relation_alias}_resource_created", + ResourceCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_resource_entity_created", + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). 
+ """ + if not model: + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + for request in model.requests: + if request.request_id == request_id: + return request.secret_user is not None or request.secret_entity is not None + return False + + def are_all_resources_created(self, rel_id: int) -> bool: + """Checks that all resources have been created for a relation. + + Args: + rel_id: The relation id to check. + """ + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + return all( + self.is_resource_created(rel_id, request.request_id, model) + for request in model.requests + if request.request_id + ) + + @staticmethod + def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: + # Actual checking method. + # No need to check for psycopg here, it's been checked before. + if not psycopg2: + return False + + try: + with psycopg2.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg2.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", + str(e), + ) + return False + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: Optional index to check the database (default: 0 - first relation). + """ + if not psycopg2: + return False + + # Can't check a non existing relation. 
+        if len(self.relations) <= relation_index:
+            return False
+
+        relation = self.relations[relation_index]
+        model = self.interface.build_model(relation_id=relation.id, component=relation.app)
+        # Use the first request that has full credentials; remaining requests are ignored.
+        for request in model.requests:
+            if request.endpoints and request.username and request.password:
+                # Only the host part of "host:port" is used for the connection.
+                host = request.endpoints.split(":")[0]
+                username = request.username.get_secret_value()
+                password = request.password.get_secret_value()
+
+                # NOTE(review): values are interpolated into a libpq keyword/value DSN
+                # with plain single quotes; a password containing ' or \ would break
+                # the connection string — confirm upstream sanitization.
+                connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'"
+                return self._is_pg_plugin_enabled(plugin, connection_string)
+        logger.info("No valid request to use to check for plugin.")
+        return False
+
+    ##############################################################################
+    # Helpers for aliases
+    ##############################################################################
+
+    def _assign_relation_alias(self, relation_id: int) -> None:
+        """Assigns an alias to a relation.
+
+        This function writes in the unit data bag.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+        """
+        # If no aliases were provided, return immediately.
+        if not self.relation_aliases:
+            return
+
+        # Return if an alias was already assigned to this relation
+        # (like when there are more than one unit joining the relation).
+        relation = self.charm.model.get_relation(self.relation_name, relation_id)
+        if relation and relation.data[self.charm.unit].get("alias"):
+            return
+
+        # Retrieve the available aliases (the ones that weren't assigned to any relation).
+        available_aliases = self.relation_aliases[:]
+        for relation in self.charm.model.relations[self.relation_name]:
+            alias = relation.data[self.charm.unit].get("alias")
+            if alias:
+                logger.debug("Alias %s was already assigned to relation %d", alias, relation.id)
+                # NOTE(review): remove() raises ValueError if the same alias was
+                # assigned to two relations; and if every alias is consumed, the
+                # later available_aliases[0] lookup raises IndexError — both cases
+                # rely on the alias count matching the relation limit (validated
+                # in __init__). Confirm this invariant holds across unit restarts.
+                available_aliases.remove(alias)
+
+        # Set the alias in the unit relation databag of the specific relation. 
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({"alias": available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get("alias") + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = 
unit + break + + response_model = self.interface.build_model(relation.id) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f"Unknown request id {short_uuid}") + return + + getattr(self.on, "authentication_updated").emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. + for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version="v1", requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info("Still waiting for data.") + 
return + + data = repository.get_field("data") + if not data: + logger.info("Missing data to compute diffs") + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f"No request matching the response with response_id {response_id}" + ) + self._handle_event(event, repository, request, response) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace("-", "_")) + repository.register_secret(uri, secret_group, response.request_id) + + if "secret-user" in _diff.added and not request.entity_type: + logger.info(f"resource {response.resource} created at {datetime.now()}") + getattr(self.on, "resource_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_created", response) + return + + if "secret-entity" in _diff.added and request.entity_type: + logger.info(f"entity {response.entity_name} created at {datetime.now()}") + getattr(self.on, "resource_entity_created").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "resource_entity_created", response) + return + + if "endpoints" in _diff.added or "endpoints" in _diff.changed: + logger.info(f"endpoints 
changed at {datetime.now()}") + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "endpoints_changed", response) + return + + if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: + logger.info(f"read-only-endpoints changed at {datetime.now()}") + getattr(self.on, "read_only_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, "read_only_endpoints_changed", response) + return diff --git a/tests/v1/integration/test_charm.py b/tests/v1/integration/test_charm.py index ff91235a..9415f959 100644 --- a/tests/v1/integration/test_charm.py +++ b/tests/v1/integration/test_charm.py @@ -330,13 +330,13 @@ async def test_peer_relation_set_secret(component, ops_test: OpsTest): assert action.results.get("value") == "blablabla" action = await ops_test.model.units.get(unit_name).run_action( - "get-peer-relation-field", **{"component": component, "field": "mygroup-field1@mygroup"} + "get-peer-relation-field", **{"component": component, "field": "mygroup-field1"} ) await action.wait() assert action.results.get("value") == "blablabla3" action = await ops_test.model.units.get(unit_name).run_action( - "get-peer-relation-field", **{"component": component, "field": "mygroup-field2@mygroup"} + "get-peer-relation-field", **{"component": component, "field": "mygroup-field2"} ) await action.wait() assert action.results.get("value") == "blablabla4" @@ -412,7 +412,6 @@ async def test_peer_relation_non_leader_unit_secrets(ops_test: OpsTest): secret = await get_secret_by_label(ops_test, "database-peers.database.unit", unit_name) assert secret.get("secret-field") == "blablabla2" - assert secret.get("monitor-password") == "#DELETED#" action = await ops_test.model.units.get(unit_name).run_action( "get-peer-relation-field", **{"component": "unit", "field": "monitor-password"} @@ -611,9 +610,14 @@ 
async def test_database_relation_with_charm_libraries(ops_test: OpsTest): # Get the version of the database and compare with the information that # was retrieved directly from the database. - version = await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "version" + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "requests" + ) + or "[]" ) + request = requests[0] + version = request["version"] assert version == data[0] @@ -697,48 +701,19 @@ async def test_two_applications_dont_share_the_same_relation_data( assert application_connection_string != another_application_connection_string -@pytest.mark.usefixtures("only_without_juju_secrets") -async def test_databag_usage_correct(ops_test: OpsTest, application_charm): - for field in ["username", "password"]: - assert await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, field - ) - - @pytest.mark.usefixtures("only_with_juju_secrets") -async def test_secrets_usage_correct_secrets(ops_test: OpsTest, application_charm): - for field in ["username", "password", "uris"]: - assert ( - await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, field - ) - is None +async def test_secrets_usage_correct_secrets(ops_test: OpsTest): + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "requests" ) - assert await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "secret-user" - ) - - -@pytest.mark.abort_on_fail -@pytest.mark.usefixtures("only_without_juju_secrets") -async def test_database_roles_relation_with_charm_libraries(ops_test: OpsTest): - """Test basic functionality of database-roles relation interface.""" - # Relate the charms and wait for them exchanging some connection data. 
- - pytest.first_database_relation = await ops_test.model.add_relation( - f"{APPLICATION_APP_NAME}:{ROLES_FIRST_DATABASE_RELATION_NAME}", DATABASE_APP_NAME - ) - await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") - - entity_name = await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, ROLES_FIRST_DATABASE_RELATION_NAME, "entity-name" - ) - entity_pass = await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, ROLES_FIRST_DATABASE_RELATION_NAME, "entity-password" + or "[]" ) + request = requests[0] + for field in ["username", "password", "uris"]: + assert request.get(field) is None - assert entity_name is not None - assert entity_pass is not None + assert request.get("secret-user") @pytest.mark.abort_on_fail @@ -752,13 +727,14 @@ async def test_database_roles_relation_with_charm_libraries_secrets(ops_test: Op ) await ops_test.model.wait_for_idle(apps=APP_NAMES, status="active") - secret_uri = await get_application_relation_data( - ops_test, - APPLICATION_APP_NAME, - ROLES_FIRST_DATABASE_RELATION_NAME, - f"{SECRET_REF_PREFIX}entity", + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, ROLES_FIRST_DATABASE_RELATION_NAME, "requests" + ) + or "[]" ) - + request = requests[0] + secret_uri = request.get(f"{SECRET_REF_PREFIX}entity") secret_content = await get_juju_secret(ops_test, secret_uri) entity_name = secret_content["entity-name"] entity_pass = secret_content["entity-password"] @@ -861,77 +837,24 @@ async def test_an_application_can_request_multiple_databases(ops_test: OpsTest, async def test_external_node_connectivity_field(ops_test: OpsTest, application_charm): # Check that the flag is missing if not requested - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, - DATABASE_APP_NAME, - "database", - "external-node-connectivity", - related_endpoint=DB_FIRST_DATABASE_RELATION_NAME, + ops_test, APPLICATION_APP_NAME, 
DB_FIRST_DATABASE_RELATION_NAME, "requests" ) - ) is None + or "[]" + ) + request = requests[0] + assert request.get("external-node-connectivity") is None # Check that the second relation raises the flag - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, - DATABASE_APP_NAME, - "database", - "external-node-connectivity", - related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" ) - ) == "true" - - -@pytest.mark.usefixtures("only_with_juju_secrets") -async def test_provider_with_additional_secrets(ops_test: OpsTest, database_charm): - # Let's make sure that there was enough time for the relation initialization to communicate secrets - secret_fields = await get_application_relation_data( - ops_test, - DATABASE_APP_NAME, - DATABASE_APP_NAME, - "requested-secrets", - related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, - ) - assert {"topsecret", "donttellanyone"} <= set(json.loads(secret_fields)) - - # Set secret - leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) - leader_name = f"{DATABASE_APP_NAME}/{leader_id}" - action = await ops_test.model.units.get(leader_name).run_action( - "set-secret", **{"relation_id": pytest.second_database_relation.id, "field": "topsecret"} - ) - await action.wait() - - # Get secret original value - secret_uri = await get_application_relation_data( - ops_test, - APPLICATION_APP_NAME, - DB_SECOND_DATABASE_RELATION_NAME, - f"{SECRET_REF_PREFIX}extra", - ) - - secret_content = await get_juju_secret(ops_test, secret_uri) - topsecret1 = secret_content["topsecret"] - - # Re-set secret - action = await ops_test.model.units.get(leader_name).run_action( - "set-secret", **{"relation_id": pytest.second_database_relation.id, "field": "topsecret"} + or "[]" ) - await action.wait() - - # Get secret after change - secret_uri = await get_application_relation_data( - ops_test, - APPLICATION_APP_NAME, - DB_SECOND_DATABASE_RELATION_NAME, - 
f"{SECRET_REF_PREFIX}extra", - ) - - secret_content = await get_juju_secret(ops_test, secret_uri) - topsecret2 = secret_content["topsecret"] - - assert topsecret1 != topsecret2 + request = requests[0] + assert request.get("external-node-connectivity") == "true" @pytest.mark.abort_on_fail @@ -945,6 +868,15 @@ async def test_relation_secret_revisions(ops_test: OpsTest): rel_id = pytest.second_database_relation.id group_mapping = "extra" + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" + ) + or "[]" + ) + request = requests[0] + request_id = request["request_id"] + # When action = await ops_test.model.units.get(leader_name).run_action( "set-secret", **{"relation_id": rel_id, "field": "topsecret", "value": "initialvalue"} @@ -952,7 +884,7 @@ async def test_relation_secret_revisions(ops_test: OpsTest): await action.wait() original_secret_revision = await get_secret_revision_by_label( - ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{group_mapping}.secret", owner + ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{request_id}.{group_mapping}.secret", owner ) action = await ops_test.model.units.get(leader_name).run_action( @@ -966,7 +898,7 @@ async def test_relation_secret_revisions(ops_test: OpsTest): await action.wait() changed_secret_revision = await get_secret_revision_by_label( - ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{group_mapping}.secret", owner + ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{request_id}.{group_mapping}.secret", owner ) action = await ops_test.model.units.get(leader_name).run_action( @@ -980,7 +912,7 @@ async def test_relation_secret_revisions(ops_test: OpsTest): await action.wait() unchanged_secret_revision = await get_secret_revision_by_label( - ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{group_mapping}.secret", owner + ops_test, f"{DATABASE_APP_NAME}.{rel_id}.{request_id}.{group_mapping}.secret", owner ) # Then @@ -1005,12 +937,14 @@ async def 
test_provider_get_set_delete_fields(field, value, ops_test: OpsTest): ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, field + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" ) - == value + or "[]" ) + request = requests[0] + assert request.get(field) == value # Check all application units can read remote relation data for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: @@ -1043,12 +977,14 @@ async def test_provider_get_set_delete_fields(field, value, ops_test: OpsTest): ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, field + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" ) - is None + or "[]" ) + request = requests[0] + assert request.get(field) is None # Delete non-existent field action = await ops_test.model.units.get(leader_name).run_action( @@ -1091,9 +1027,14 @@ async def test_provider_get_set_delete_fields_secrets( ) await action.wait() - assert await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, relation_field + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" + ) + or "[]" ) + request = requests[0] + assert request.get(relation_field) # Check all application units can read remote relation data for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: @@ -1126,12 +1067,14 @@ async def test_provider_get_set_delete_fields_secrets( ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, relation_field + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" ) - is None + or "[]" ) 
+ request = requests[0] + assert request.get(relation_field) is None # Check that the field is deleted action = await ops_test.model.units.get(leader_name).run_action( @@ -1235,16 +1178,18 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( ops_test, - DATABASE_APP_NAME, + APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, - "new_field", + "requests", related_endpoint="second-database-db", ) - == "blah" + or "[]" ) + request = requests[0] + assert request.get("new_field") == "blah" # Check all application units can read remote relation data for unit in ops_test.model.applications[DATABASE_APP_NAME].units: @@ -1277,16 +1222,18 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( ops_test, - DATABASE_APP_NAME, + APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, - "new_field", + "requests", related_endpoint="second-database-db", ) - is None + or "[]" ) + request = requests[0] + assert request.get("new_field") is None @pytest.mark.log_errors_allowed( @@ -1321,17 +1268,20 @@ async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): await action.wait() assert await check_logs( ops_test, - strings=[ - "This operation (update_relation_data()) can only be performed by the leader unit" - ], + strings=["This operation (write_field) can only be performed by the leader unit"], ) - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "new_field2" + ops_test, + APPLICATION_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + "requests", ) - is None + or "[]" ) + request = requests[0] + assert request.get("new_field2") is None action = await ops_test.model.units.get(unit_name).run_action( "delete-relation-field", @@ -1340,17 +1290,20 @@ async def 
test_provider_set_delete_fields_leader_only(ops_test: OpsTest): await action.wait() assert await check_logs( ops_test, - strings=[ - "This operation (delete_relation_data()) can only be performed by the leader unit" - ], + strings=["This operation (delete_field) can only be performed by the leader unit"], ) - assert ( + requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "new_field" + ops_test, + APPLICATION_APP_NAME, + DB_SECOND_DATABASE_RELATION_NAME, + "requests", ) - == "blah" + or "[]" ) + request = requests[0] + assert request.get("new_field") == "blah" async def test_requires_set_delete_fields(ops_test: OpsTest): @@ -1367,16 +1320,18 @@ async def test_requires_set_delete_fields(ops_test: OpsTest): ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( ops_test, DATABASE_APP_NAME, DATABASE_APP_NAME, - "new_field_req", + "requests", related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, ) - == "blah-req" + or "[]" ) + request = requests[0] + assert request.get("new_field_req") == "blah-req" # Delete field action = await ops_test.model.units.get(leader_name).run_action( @@ -1385,16 +1340,18 @@ async def test_requires_set_delete_fields(ops_test: OpsTest): ) await action.wait() - assert ( + requests = json.loads( await get_application_relation_data( ops_test, DATABASE_APP_NAME, DATABASE_APP_NAME, - "new_field_req", + "requests", related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, ) - is None + or "[]" ) + request = requests[0] + assert request.get("new_field_req") is None @pytest.mark.log_errors_allowed( @@ -1429,21 +1386,21 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): await action.wait() assert await check_logs( ops_test, - strings=[ - "This operation (update_relation_data()) can only be performed by the leader unit" - ], + strings=["This operation (write_field) can only be performed by the leader unit"], ) - assert ( + 
requests = json.loads( await get_application_relation_data( ops_test, DATABASE_APP_NAME, DATABASE_APP_NAME, - "new_field2-req", + "requests", related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, ) - is None + or "[]" ) + request = requests[0] + assert request.get("new-field2-req") is None action = await ops_test.model.units.get(unit_name).run_action( "delete-relation-field", @@ -1452,9 +1409,7 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): await action.wait() assert await check_logs( ops_test, - strings=[ - "This operation (delete_relation_data()) can only be performed by the leader unit" - ], + strings=["This operation (write_field()) can only be performed by the leader unit"], ) assert ( @@ -1467,6 +1422,18 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): ) == "blah-req" ) + requests = json.loads( + await get_application_relation_data( + ops_test, + DATABASE_APP_NAME, + DATABASE_APP_NAME, + "requests", + related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, + ) + or "[]" + ) + request = requests[0] + assert request.get("new-field-req") == "blah-req" async def test_scaling_requires_can_access_shared_secrest(ops_test): diff --git a/tests/v1/integration/test_kafka_charm.py b/tests/v1/integration/test_kafka_charm.py index 47f8e650..af062885 100644 --- a/tests/v1/integration/test_kafka_charm.py +++ b/tests/v1/integration/test_kafka_charm.py @@ -81,10 +81,12 @@ async def test_kafka_relation_with_charm_libraries_secrets(ops_test: OpsTest): request = requests[0] secret_uri = request[f"{PROV_SECRET_PREFIX}user"] secret_data = await get_juju_secret(ops_test, secret_uri) + extra_secret_uri = request[f"{PROV_SECRET_PREFIX}extra"] + extra_secret_data = await get_juju_secret(ops_test, extra_secret_uri) username = secret_data["username"] password = secret_data["password"] bootstrap_server = request["endpoints"] - consumer_group_prefix = request["consumer-group-prefix"] + consumer_group_prefix = 
extra_secret_data["consumer-group-prefix"] topic = request["resource"] assert username == "admin" diff --git a/tests/v1/unit/test_data_interfaces.py b/tests/v1/unit/test_data_interfaces.py index 2a36fd6f..e65d54b7 100644 --- a/tests/v1/unit/test_data_interfaces.py +++ b/tests/v1/unit/test_data_interfaces.py @@ -1190,18 +1190,26 @@ def test_requires_interface_functions_secrets(self): "version": "v1", "requests": [ { - "resource": "data_platform", + "entity-permissions": None, + "entity-type": None, + "external-node-connectivity": False, + "extra-group-roles": None, + "extra-user-roles": "CREATEDB,CREATEROLE", "request-id": "c759221a6c14c72a", + "resource": "data_platform", "salt": "kkkkkkkk", - "extra-user-roles": "CREATEDB,CREATEROLE", - "external-node-connectivity": False, + "secret-mtls": None, }, { - "resource": "", + "entity-permissions": None, + "entity-type": "USER", + "external-node-connectivity": False, + "extra-group-roles": None, + "extra-user-roles": None, "request-id": "9ecfabfbb5258f88", + "resource": "", "salt": "xxxxxxxx", - "external-node-connectivity": False, - "entity-type": "USER", + "secret-mtls": None, }, ], } @@ -1247,8 +1255,8 @@ def test_on_resource_created_secrets(self, _on_resource_created): # using the requires charm library event. event = _on_resource_created.call_args[0][0] assert event.response.secret_user == secret.id - assert event.response.username == "test-username" - assert event.response.password == "test-password" + assert event.response.username.get_secret_value() == "test-username" + assert event.response.password.get_secret_value() == "test-password" assert self.harness.charm.requirer.is_resource_created( self.rel_id, event.response.request_id @@ -1293,8 +1301,8 @@ def test_on_resource_created_secrets(self, _on_resource_created): # using the requires charm library event. 
event = _on_resource_created.call_args[0][0] assert event.response.secret_user == secret2.id - assert event.response.username == "test-username-2" - assert event.response.password == "test-password-2" + assert event.response.username.get_secret_value() == "test-username-2" + assert event.response.password.get_secret_value() == "test-password-2" assert self.harness.charm.requirer.is_resource_created(rel_id, event.response.request_id) assert self.harness.charm.requirer.are_all_resources_created(rel_id) @@ -1339,8 +1347,8 @@ def test_on_resource_entity_created_secrets(self, _on_resource_entity_created): # Check that the entity-type, entity-name and entity-password are present in the relation. event = _on_resource_entity_created.call_args[0][0] assert event.response.secret_entity == secret.id - assert event.response.entity_name == "test-username" - assert event.response.entity_password == "test-password" + assert event.response.entity_name.get_secret_value() == "test-username" + assert event.response.entity_password.get_secret_value() == "test-password" # Reset the mock call count. _on_resource_entity_created.reset_mock() @@ -1380,7 +1388,7 @@ def test_on_resource_entity_created_secrets(self, _on_resource_entity_created): # Check that the entity-type and entity-name are present in the relation. 
event = _on_resource_entity_created.call_args[0][0] assert event.response.secret_entity == secret2.id - assert event.response.entity_name == "test-groupname" + assert event.response.entity_name.get_secret_value() == "test-groupname" assert event.response.entity_password is None def test_fetch_relation_data_secrets_fields(self): @@ -1446,18 +1454,26 @@ def test_fetch_my_relation_data_and_fields_secrets(self): "version": "v1", "requests": [ { + "entity-permissions": None, + "entity-type": None, "salt": "kkkkkkkk", "request-id": "c759221a6c14c72a", "resource": "data_platform", + "extra-group-roles": None, "extra-user-roles": "CREATEDB,CREATEROLE", "external-node-connectivity": False, + "secret-mtls": None, }, { + "entity-permissions": None, "salt": "xxxxxxxx", "request-id": "9ecfabfbb5258f88", "resource": "", "entity-type": "USER", + "extra-group-roles": None, + "extra-user-roles": None, "external-node-connectivity": False, + "secret-mtls": None, }, ], } @@ -1680,8 +1696,8 @@ def test_additional_fields_are_accessible(self, _on_resource_created): # Check that the fields are present in the relation # using the requires charm library. 
assert event.response.tls.get_secret_value() is True - assert event.response.tls_ca == "deadbeef" - assert event.response.uris == "host1:port,host2:port" + assert event.response.tls_ca.get_secret_value() == "deadbeef" + assert event.response.uris.get_secret_value() == "host1:port,host2:port" assert event.response.version == "1.0" def test_assign_relation_alias(self): From e4abfa1142e3aa6944d2896217e7251457596645 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Wed, 17 Sep 2025 15:05:27 +0200 Subject: [PATCH 04/34] fix: more fixes --- .../data_platform_libs/v1/data_interfaces.py | 2 +- .../data_platform_libs/v1/data_interfaces.py | 2753 ----------------- .../application-charm/src/charm.py | 13 +- .../integration/database-charm/src/charm.py | 22 +- .../data_platform_libs/v1/data_interfaces.py | 2753 ----------------- .../data_platform_libs/v1/data_interfaces.py | 2753 ----------------- .../data_platform_libs/v1/data_interfaces.py | 2753 ----------------- .../data_platform_libs/v1/data_interfaces.py | 2753 ----------------- tests/v1/integration/test_charm.py | 106 +- tox.ini | 1 + 10 files changed, 36 insertions(+), 13873 deletions(-) delete mode 100644 tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py delete mode 100644 tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py delete mode 100644 tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py delete mode 100644 tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py delete mode 100644 tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index e22388d1..c4f13a85 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -2625,7 +2625,7 @@ def 
_on_secret_changed_event(self, event: SecretChangedEvent): remote_unit = unit break - response_model = self.interface.build_model(relation.id) + response_model = self.interface.build_model(relation.id, component=relation.app) if not short_uuid: return for _response in response_model.requests: diff --git a/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py deleted file mode 100644 index e22388d1..00000000 --- a/tests/v1/integration/application-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +++ /dev/null @@ -1,2753 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -r"""Library to manage the relation for the data-platform products. - -This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. - -This library contains the Requires and Provides classes for handling the relation -between an application and multiple managed application supported by the data-team: -MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. - -#### Models - -This library exposes basic default models that can be used in most cases. -If you need more complex models, you can subclass them. 
- -```python -from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr - -class ExtendedCommonModel(RequirerCommonModel): - operator_password: ExtraSecretStr -``` - -Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` - -```python -MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] -``` - -Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. - - -#### Requirer Charm - -This library is a uniform interface to a selection of common database -metadata, with added custom events that add convenience to database management, -and methods to consume the application related data. - - -```python -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ClientCharm(CharmBase): - # Database charm that accepts connections from application charms. - def __init__(self, *args) -> None: - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - RequirerCommonModel( - entity_type="USER", - ) - ] - self.database = ResourceRequirerEventHandler( - self,"database", requests, response_model=ResourceProviderModel - ) - self.framework.observe(self.database.on.resource_created, self._on_resource_created) - self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) - - def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new database is created. 
- relation_id = event.relation.id - response = event.response # This is the response model - - username = event.response.username - password = event.response.password - ... - - def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new entity is created. - ... - -Compared to V1, this library makes heavy use of pydantic models, and allows for -multiple requests, specified as a list. -On the Requirer side, each response will trigger one custom event for that response. -This way, it allows for more strategic events to be emitted according to the request. - -As show above, the library provides some custom events to handle specific situations, which are listed below: -- resource_created: event emitted when the requested database is created. -- resource_entity_created: event emitted when the requested entity is created. -- endpoints_changed: event emitted when the read/write endpoints of the database have changed. -- read_only_endpoints_changed: event emitted when the read-only endpoints of the database - have changed. Event is not triggered if read/write endpoints changed too. - -If it is needed to connect multiple database clusters to the same relation endpoint -the application charm can implement the same code as if it would connect to only -one database cluster (like the above code example). - -To differentiate multiple clusters connected to the same relation endpoint -the application charm can use the name of the remote application: - -```python - -def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Get the remote app name of the cluster that triggered this event - cluster = event.relation.app.name -``` - -It is also possible to provide an alias for each different database cluster/relation. - -So, it is possible to differentiate the clusters in two ways. -The first is to use the remote application name, i.e., `event.relation.app.name`, as above. 
- -The second way is to use different event handlers to handle each cluster events. -The implementation would be something like the following code: - -```python - -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ApplicationCharm(CharmBase): - # Application charm that connects to database charms. - - def __init__(self, *args): - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - ] - # Define the cluster aliases and one handler for each cluster database created event. - self.database = ResourceRequirerEventHandler( - self, - relation_name="database" - relations_aliases = ["cluster1", "cluster2"], - requests= - ) - self.framework.observe( - self.database.on.cluster1_resource_created, self._on_cluster1_resource_created - ) - self.framework.observe( - self.database.on.cluster2_resource_created, self._on_cluster2_resource_created - ) - - def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster1 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... - - def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster2 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... 
-``` - -### Provider Charm - -Following an example of using the ResourceRequestedEvent, in the context of the -database charm code: - -```python -from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides - -class SampleCharm(CharmBase): - - def __init__(self, *args): - super().__init__(*args) - # Charm events defined in the database provides charm library. - self.provided_database = DatabaseProvides(self, relation_name="database") - self.framework.observe(self.provided_database.on.database_requested, - self._on_database_requested) - # Database generic helper - self.database = DatabaseHelper() - - def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: - # Handle the event triggered by a new database requested in the relation - # Retrieve the database name using the charm library. - db_name = event.database - # generate a new user credential - username = self.database.generate_user() - password = self.database.generate_password() - # set the credentials for the relation - self.provided_database.set_credentials(event.relation.id, username, password) - # set other variables for the relation event.set_tls("False") -``` - -As shown above, the library provides a custom event (database_requested) to handle -the situation when an application charm requests a new database to be created. -It's preferred to subscribe to this event instead of relation changed event to avoid -creating a new database when other information other than a database name is -exchanged in the relation databag. 
- -""" - -import copy -import hashlib -import json -import logging -import pickle -import random -import string -from abc import ABC, abstractmethod -from collections import namedtuple -from datetime import datetime -from enum import Enum -from typing import ( - Annotated, - Any, - ClassVar, - Generic, - Literal, - NewType, - TypeAlias, - TypeVar, - final, - overload, -) - -from ops import ( - CharmBase, - EventBase, - Model, - RelationChangedEvent, - RelationCreatedEvent, - RelationEvent, - Secret, - SecretChangedEvent, - SecretInfo, - SecretNotFoundError, -) -from ops.charm import CharmEvents -from ops.framework import EventSource, Handle, Object -from ops.model import Application, ModelError, Relation, Unit -from pydantic import ( - AfterValidator, - AliasChoices, - BaseModel, - ConfigDict, - Discriminator, - Field, - SecretStr, - SerializationInfo, - SerializerFunctionWrapHandler, - Tag, - TypeAdapter, - ValidationInfo, - model_serializer, - model_validator, -) -from pydantic.types import _SecretBase, _SecretField -from pydantic_core import CoreSchema, core_schema -from typing_extensions import TypeAliasType, override - -try: - import psycopg2 -except ImportError: - psycopg2 = None - -# The unique Charmhub library identifier, never change it -LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" - -# Increment this major API version when introducing breaking changes -LIBAPI = 1 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 0 - -PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] - -logger = logging.getLogger(__name__) - -MODEL_ERRORS = { - "not_leader": "this unit is not the leader", - "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", - "owner_no_refresh": "ERROR secret owner cannot use --refresh", -} - -RESOURCE_ALIASES = [ - "database", - "subject", - "topic", - "index", - "plugin-url", -] - -SECRET_PREFIX = "secret-" - - 
-############################################################################## -# Exceptions -############################################################################## - - -class DataInterfacesError(Exception): - """Common ancestor for DataInterfaces related exceptions.""" - - -class SecretError(DataInterfacesError): - """Common ancestor for Secrets related exceptions.""" - - -class SecretAlreadyExistsError(SecretError): - """A secret that was to be added already exists.""" - - -class SecretsUnavailableError(SecretError): - """Secrets aren't yet available for Juju version used.""" - - -class IllegalOperationError(DataInterfacesError): - """To be used when an operation is not allowed to be performed.""" - - -############################################################################## -# Global helpers / utilities -############################################################################## - - -def gen_salt() -> str: - """Generates a consistent salt.""" - return "".join(random.choices(string.ascii_letters + string.digits, k=16)) - - -def gen_hash(resource_name: str, salt: str) -> str: - """Generates a consistent hash based on the resource name and salt.""" - hasher = hashlib.sha256() - hasher.update(f"{resource_name}:{salt}".encode()) - return hasher.hexdigest()[:16] - - -def ensure_leader_for_app(f): - """Decorator to ensure that only leader can perform given operation.""" - - def wrapper(self, *args, **kwargs): - if self.component == self._local_app and not self._local_unit.is_leader(): - logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") - return - return f(self, *args, **kwargs) - - wrapper.leader_only = True - return wrapper - - -def get_encoded_dict( - relation: Relation, member: Unit | Application, field: str -) -> dict[str, Any] | None: - """Retrieve and decode an encoded field from relation data.""" - data = json.loads(relation.data[member].get(field, "{}")) - if isinstance(data, dict): - return data - 
logger.error("Unexpected datatype for %s instead of dict.", str(data)) - - -Diff = namedtuple("Diff", ["added", "changed", "deleted"]) -Diff.__doc__ = """ -A tuple for storing the diff between two data mappings. - -added - keys that were added -changed - keys that still exist but have new values -deleted - key that were deleted""" - - -def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: - """Retrieves the diff of the data in the relation changed databag for v1. - - Args: - old_data: dictionary of the stored data before the event. - new_data: dictionary of the received data to compute the diff. - - Returns: - a Diff instance containing the added, deleted and changed - keys from the event relation databag. - """ - old_data = old_data or {} - - # These are the keys that were added to the databag and triggered this event. - added = new_data.keys() - old_data.keys() - # These are the keys that were removed from the databag and triggered this event. - deleted = old_data.keys() - new_data.keys() - # These are the keys that already existed in the databag, - # but had their values changed. - changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} - # Return the diff with all possible changes. 
- return Diff(added, changed, deleted) - - -def resource_added(diff: Diff) -> bool: - """Ensures that one of the aliased resources has been added.""" - return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) - - -def store_new_data( - relation: Relation, - component: Unit | Application, - new_data: dict[str, str], - short_uuid: str | None = None, -): - """Stores the new data in the databag for diff computation.""" - # First, the case for V0 - if not short_uuid: - relation.data[component].update({"data": json.dumps(new_data)}) - # Then the case for V1, where we have a ShortUUID - else: - data = json.loads(relation.data[component].get("data", "{}")) - if not isinstance(data, dict): - raise ValueError - newest_data = copy.deepcopy(data) - newest_data[short_uuid] = new_data - relation.data[component].update({"data": json.dumps(newest_data)}) - - -############################################################################## -# Helper classes -############################################################################## - -SecretGroup = NewType("SecretGroup", str) - - -SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) - - -class SecretBool(_SecretField[bool]): - """Class for booleans as secrets.""" - - _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() - _error_kind: ClassVar[str] = "bool_type" - - def _display(self) -> str: - return "****" - - -OptionalSecretStr: TypeAlias = SecretStr | None -OptionalSecretBool: TypeAlias = SecretBool | None - -OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) - -UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] -TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] -TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] -MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] -ExtraSecretStr = Annotated[OptionalSecretStr, 
# Annotated secret-field alias: marks an optional secret value belonging to the
# "entity" secret group (the string metadata is the group name; see module docs).
EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"]


class Scope(Enum):
    """Peer relations scope."""

    APP = "app"
    UNIT = "unit"


class CachedSecret:
    """Locally cache a secret.

    The data structure is precisely reusing/simulating as in the actual Secret Storage
    """

    # Juju error messages we deliberately tolerate when refreshing content
    # (see get_content); any other ModelError is re-raised.
    KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]]

    def __init__(
        self,
        model: Model,
        component: Application | Unit,
        label: str,
        secret_uri: str | None = None,
    ):
        # _secret_meta / _secret_content are lazy caches filled on first access.
        self._secret_meta: Secret | None = None
        self._secret_content: dict[str, str] = {}
        self._secret_uri = secret_uri
        self.label = label
        self._model = model
        self.component = component
        self.current_label = None

    @property
    def meta(self) -> Secret | None:
        """Getting cached secret meta-information.

        Lookup order: by label first, then by URI (which also attaches the label).
        Returns None when neither a URI nor a label is available.
        """
        if not self._secret_meta:
            if not (self._secret_uri or self.label):
                return

            try:
                self._secret_meta = self._model.get_secret(label=self.label)
            except SecretNotFoundError:
                # Falling back to seeking for potential legacy labels
                logger.info(f"Secret with label {self.label} not found")

            # If still not found, to be checked by URI, to be labelled with the proposed label
            if not self._secret_meta and self._secret_uri:
                self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label)
        return self._secret_meta

    ##########################################################################
    # Public functions
    ##########################################################################

    def add_secret(
        self,
        content: dict[str, str],
        relation: Relation | None = None,
        label: str | None = None,
    ) -> Secret:
        """Create a new secret.

        Raises:
            SecretAlreadyExistsError: if this cache entry already points at a secret URI.
        """
        if self._secret_uri:
            raise SecretAlreadyExistsError(
                "Secret is already defined with uri %s", self._secret_uri
            )

        label = self.label if not label else label

        secret = self.component.add_secret(content, label=label)
        if relation and relation.app != self._model.app:
            # If it's not a peer relation, grant is to be applied
            secret.grant(relation)
        self._secret_uri = secret.id
        self._secret_meta = secret
        return self._secret_meta

    def get_content(self) -> dict[str, str]:
        """Getting cached secret content."""
        if not self._secret_content:
            if self.meta:
                try:
                    self._secret_content = self.meta.get_content(refresh=True)
                except (ValueError, ModelError) as err:
                    # https://bugs.launchpad.net/juju/+bug/2042596
                    # Only triggered when 'refresh' is set
                    if isinstance(err, ModelError) and not any(
                        msg in str(err) for msg in self.KNOWN_MODEL_ERRORS
                    ):
                        raise
                    # Due to: ValueError: Secret owner cannot use refresh=True
                    self._secret_content = self.meta.get_content()
        return self._secret_content

    def set_content(self, content: dict[str, str]) -> None:
        """Setting cached secret content.

        An empty content dict removes all revisions of the secret.
        """
        if not self.meta:
            return

        # No-op when content is unchanged (avoids a needless new revision).
        if content == self.get_content():
            return

        if content:
            self.meta.set_content(content)
            self._secret_content = content
        else:
            self.meta.remove_all_revisions()

    def get_info(self) -> SecretInfo | None:
        """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any."""
        if self.meta:
            return self.meta.get_info()

    def remove(self) -> None:
        """Remove secret.

        Raises:
            SecretsUnavailableError: if no underlying secret exists.
        """
        if not self.meta:
            raise SecretsUnavailableError("Non-existent secret was attempted to be removed.")
        try:
            self.meta.remove_all_revisions()
        except SecretNotFoundError:
            # Already gone in Juju; still reset the local caches below.
            pass
        self._secret_content = {}
        self._secret_meta = None
        self._secret_uri = None


class SecretCache:
    """A data structure storing CachedSecret objects."""

    def __init__(self, model: Model, component: Application | Unit):
        self._model = model
        self.component = component
        self._secrets: dict[str, CachedSecret] = {}

    def get(self, label: str, uri: str | None = None) -> CachedSecret | None:
        """Getting a secret from Juju Secret store or cache.

        Only caches entries whose secret actually resolves (secret.meta is truthy).
        """
        if not self._secrets.get(label):
            secret = CachedSecret(self._model, self.component, label, uri)
            if secret.meta:
                self._secrets[label] = secret
        return self._secrets.get(label)

    def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret:
        """Adding a secret to Juju Secret.

        Raises:
            SecretAlreadyExistsError: if a secret with this label is already cached.
        """
        if self._secrets.get(label):
            raise SecretAlreadyExistsError(f"Secret {label} already exists")

        secret = CachedSecret(self._model, self.component, label)
        secret.add_secret(content, relation)
        self._secrets[label] = secret
        return self._secrets[label]

    def remove(self, label: str) -> None:
        """Remove a secret from the cache."""
        if secret := self.get(label):
            try:
                secret.remove()
                self._secrets.pop(label)
            except (SecretsUnavailableError, KeyError):
                pass
            else:
                return
        # Reached on lookup miss or on a swallowed removal error.
        logging.debug("Non-existing Juju Secret was attempted to be removed %s", label)


##############################################################################
# Models classes
##############################################################################


class PeerModel(BaseModel):
    """Common Model for all peer relations."""

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        # Databag keys are dash-separated; model attributes use underscores.
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field.

        Requires a `repository` in the validation context; without it the model
        is returned unchanged (plain databag parsing).
        """
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        for field, field_info in self.__pydantic_fields__.items():
            # Secret-backed fields are recognised by their annotated type plus a
            # single metadata entry naming the secret group.
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                if not secret:
                    logger.info(f"No secret for group {secret_group}")
                    continue

                value = secret.get_content().get(aliased_field)

                # Booleans are JSON-encoded inside the secret content.
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)
                setattr(self, field, value)

        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets.

        Requires a `repository` in the serialization context; without it, plain
        serialization is performed and secret fields are excluded.
        """
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                value = getattr(self, field)

                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    # NOTE(review): nesting reconstructed from mangled source —
                    # a new secret is only created when there is a value to store.
                    if value:
                        secret = repository.add_secret(
                            aliased_field,
                            actual_value,
                            secret_group,
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    # None means "delete this entry from the secret".
                    full_content.pop(aliased_field, None)
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)
        return handler(self)
class CommonModel(BaseModel):
    """Common Model for both requirer and provider.

    request_id stores the request identifier for easier access.
    resource is the requested resource.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        # Databag keys are dash-separated; model attributes use underscores.
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="")
    request_id: str | None = Field(default=None)
    salt: str = Field(
        description="This salt is used to create unique hashes even when other fields map 1-1",
        default_factory=gen_salt,
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field.

        Requires a `repository` in the validation context; without it the model
        is returned unchanged (plain databag parsing).
        """
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        # Each request is addressed by its id, or a stable hash of (resource, salt).
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                # The databag field (e.g. "secret-user") holding the secret URI.
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)

                if not secret_uri:
                    continue

                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                if not secret:
                    logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}")
                    continue

                value = secret.get_content().get(aliased_field)
                # Booleans are JSON-encoded inside the secret content.
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)

                setattr(self, field, value)
        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        # Secrets emptied during this pass; cleaned up at the end.
        _encountered_secrets: set[tuple[CachedSecret, str]] = set()
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        # Backward compatibility for v0 regarding secrets.
        if info.context.get("version") == "v0":
            short_uuid = None

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)
                aliased_field = field_info.serialization_alias or field
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)
                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                value = getattr(self, field)

                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    if value:
                        secret = repository.add_secret(
                            aliased_field, actual_value, secret_group, short_uuid
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                        # Publish the new secret URI through the databag field.
                        setattr(self, secret_field, secret.meta.id)
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    # None means "delete this entry"; remember the secret so we
                    # can drop it entirely if it ends up empty.
                    full_content.pop(aliased_field, None)
                    _encountered_secrets.add((secret, secret_field))
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)

        # Delete all empty secrets and clean up their fields.
        for secret, secret_field in _encountered_secrets:
            if not secret.get_content():
                # Setting a field to '' deletes it
                setattr(self, secret_field, "")
                repository.delete_secret(secret.label)

        return handler(self)

    @classmethod
    def _get_secret_field(cls, field: str) -> SecretGroup | None:
        """Checks if the field is a secret uri or not."""
        if not field.startswith(SECRET_PREFIX):
            return None

        # "secret-user" -> group "user".
        value = field.split("-")[1]
        if info := cls.__pydantic_fields__.get(field.replace("-", "_")):
            if info.annotation == SecretString:
                return SecretGroup(value)
        return None


class EntityPermissionModel(BaseModel):
    """Entity Permissions Model."""

    resource_name: str
    resource_type: str
    privileges: list


class RequirerCommonModel(CommonModel):
    """Requirer side of the request model.

    extra_user_roles is used to request more roles for that user.
    external_node_connectivity is used to indicate that the URI should be made for external clients when True
    """

    extra_user_roles: str | None = Field(default=None)
    extra_group_roles: str | None = Field(default=None)
    external_node_connectivity: bool = Field(default=False)
    entity_type: Literal["USER", "GROUP"] | None = Field(default=None)
    entity_permissions: list[EntityPermissionModel] | None = Field(default=None)
    secret_mtls: SecretString | None = Field(default=None)
    mtls_cert: MtlsSecretStr = Field(default=None)

    @model_validator(mode="after")
    def validate_fields(self):
        """Validates that no inconsistent request is being sent.

        Raises:
            ValueError: on contradictory entity_type / roles combinations.
        """
        # NOTE(review): the Literal annotation already restricts entity_type;
        # this check is kept as defense-in-depth for subclasses widening the type.
        if self.entity_type and self.entity_type not in ["USER", "GROUP"]:
            raise ValueError("Invalid entity-type. Possible values are USER and GROUP")

        if self.entity_type == "USER" and self.extra_group_roles:
            raise ValueError("Inconsistent entity information. Use extra_user_roles instead")

        if self.entity_type == "GROUP" and self.extra_user_roles:
            raise ValueError("Inconsistent entity information. Use extra_group_roles instead")

        return self


class ProviderCommonModel(CommonModel):
    """Serialized fields added to the databag.

    endpoints stores the endpoints exposed to that client.
    secret_user is a secret URI mapping to the user credentials
    secret_tls is a secret URI mapping to the TLS certificate
    secret_extra is a secret URI for all additional secrets requested.
    """

    endpoints: str | None = Field(default=None)
    read_only_endpoints: str | None = Field(default=None)
    secret_user: SecretString | None = Field(default=None)
    secret_tls: SecretString | None = Field(default=None)
    secret_extra: SecretString | None = Field(default=None)
    secret_entity: SecretString | None = Field(default=None)


class ResourceProviderModel(ProviderCommonModel):
    """Extended model including the deserialized fields."""

    username: UserSecretStr = Field(default=None)
    password: UserSecretStr = Field(default=None)
    uris: UserSecretStr = Field(default=None)
    read_only_uris: UserSecretStr = Field(default=None)
    tls: TlsSecretBool = Field(default=None)
    tls_ca: TlsSecretStr = Field(default=None)
    entity_name: EntitySecretStr = Field(default=None)
    entity_password: EntitySecretStr = Field(default=None)
    version: str | None = Field(default=None)


class RequirerDataContractV0(RequirerCommonModel):
    """Backward compatibility."""

    version: Literal["v0"] = Field(default="v0")

    # The v0 databag key the resource was provided under (e.g. "database",
    # "topic"); excluded from serialization.
    original_field: str = Field(exclude=True, default="")

    @model_validator(mode="before")
    @classmethod
    def ensure_original_field(cls, data: Any):
        """Ensures that we keep the original field."""
        if isinstance(data, dict):
            for alias in RESOURCE_ALIASES:
                if data.get(alias) is not None:
                    data["original_field"] = alias
                    break
        else:
            for alias in RESOURCE_ALIASES:
                # Fix: default=None avoids AttributeError on models lacking the
                # alias, and `break` keeps the FIRST match, consistent with the
                # dict branch above (previously the last match won).
                if getattr(data, alias, None) is not None:
                    data.original_field = alias
                    break
        return data


TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel)
TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel)


class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]):
    """The new Data Contract."""

    version: Literal["v1"] = Field(default="v1")
    requests: list[TRequirerCommonModel]


def discriminate_on_version(payload: Any) -> str:
    """Use the version to discriminate.

    Payloads without a version field are treated as legacy v0.
    """
    if isinstance(payload, dict):
        return payload.get("version", "v0")
    return getattr(payload, "version", "v0")


RequirerDataContractType = Annotated[
    Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")],
    Discriminator(discriminate_on_version),
]


RequirerDataContract = TypeAdapter(RequirerDataContractType)


class DataContractV0(ResourceProviderModel):
    """The Data contract of the response, for V0."""


class DataContractV1(BaseModel, Generic[TResourceProviderModel]):
    """The Data contract of the response, for V1."""

    version: Literal["v1"] = Field(default="v1")
    requests: list[TResourceProviderModel] = Field(default_factory=list)


# NOTE(review): name looks like a typo for "DataContract", but it is public API
# of this module, so it is kept for backward compatibility.
DataContact = TypeAdapter(DataContractV1[ResourceProviderModel])


TCommonModel = TypeVar("TCommonModel", bound=CommonModel)


def is_topic_value_acceptable(value: str | None) -> str | None:
    """Check whether the given Kafka topic value is acceptable.

    Raises:
        ValueError: if the value starts with a wildcard in its first 3 characters.
    """
    if value and "*" in value[:3]:
        # Fix: removed the doubled comma in the message.
        raise ValueError(f"Error on topic '{value}', unacceptable value.")
    return value


class KafkaRequestModel(RequirerCommonModel):
    """Specialised model for Kafka."""

    consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = (
        Field(default=None)
    )
class KafkaResponseModel(ResourceProviderModel):
    """Kafka response model."""

    consumer_group_prefix: ExtraSecretStr = Field(default=None)
    zookeeper_uris: ExtraSecretStr = Field(default=None)


##############################################################################
# AbstractRepository class
##############################################################################


class AbstractRepository(ABC):
    """Abstract repository interface.

    A repository abstracts reading/writing relation data and the secrets
    attached to it, so the pydantic models stay storage-agnostic.
    """

    @abstractmethod
    def get_secret(
        self, secret_group, secret_uri: str | None, short_uuid: str | None = None
    ) -> CachedSecret | None:
        """Gets a secret from the secret cache by uri or label."""
        ...

    @abstractmethod
    def get_secret_field(
        self,
        field: str,
        secret_group: SecretGroup,
        short_uuid: str | None = None,
    ) -> str | None:
        """Gets a value for a field stored in a secret group."""
        ...

    @abstractmethod
    def get_field(self, field: str) -> str | None:
        """Gets the value for one field."""
        ...

    @abstractmethod
    def get_fields(self, *fields: str) -> dict[str, str | None]:
        """Gets the values for all provided fields."""
        ...

    @abstractmethod
    def write_field(self, field: str, value: Any) -> None:
        """Writes the value in the field, without any secret support."""
        ...

    @abstractmethod
    def write_fields(self, mapping: dict[str, Any]) -> None:
        """Writes the values of mapping in the fields without any secret support (keys of mapping)."""
        ...

    # Fix: this was the only interface method missing @abstractmethod, so an
    # incomplete subclass would silently inherit a no-op implementation.
    @abstractmethod
    def write_secret_field(
        self, field: str, value: Any, group: SecretGroup
    ) -> CachedSecret | None:
        """Writes a secret field."""
        ...

    @abstractmethod
    def add_secret(
        self,
        field: str,
        value: Any,
        secret_group: SecretGroup,
        short_uuid: str | None = None,
    ) -> CachedSecret | None:
        """Gets a value for a field stored in a secret group."""
        ...

    @abstractmethod
    def delete_secret(self, label: str):
        """Deletes a secret by its label."""
        ...

    @abstractmethod
    def delete_field(self, field: str) -> None:
        """Deletes a field."""
        ...

    @abstractmethod
    def delete_fields(self, *fields: str) -> None:
        """Deletes all the provided fields."""
        ...

    @abstractmethod
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        """Delete a field stored in a secret group."""
        ...

    @abstractmethod
    def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None:
        """Registers a secret using the repository."""
        ...

    @abstractmethod
    def get_data(self) -> dict[str, Any] | None:
        """Gets the whole data."""
        ...

    @abstractmethod
    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Builds a secret field."""


class OpsRepository(AbstractRepository):
    """Implementation for ops repositories, with some methods left out."""

    # Databag key prefix under which secret URIs are published.
    SECRET_FIELD_NAME: str

    # Secret groups this repository refuses to read/write.
    IGNORES_GROUPS: list[SecretGroup] = []

    # Whether secret URIs are mirrored into the relation databag.
    uri_to_databag: bool = True

    def __init__(
        self,
        model: Model,
        relation: Relation | None,
        component: Unit | Application,
    ):
        self._local_app = model.app
        self._local_unit = model.unit
        self.relation = relation
        self.component = component
        self.model = model
        self.secrets = SecretCache(model, component)

    @abstractmethod
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group mapping for secrets within a relation context."""
        ...
- - @override - def get_data(self) -> dict[str, Any] | None: - ret: dict[str, Any] = {} - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - for key, value in self.relation.data[self.component].items(): - try: - ret[key] = json.loads(value) - except json.JSONDecodeError: - ret[key] = value - - return ret - - @override - @ensure_leader_for_app - def get_field( - self, - field: str, - ) -> str | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - return relation_data.get(field) - - @override - @ensure_leader_for_app - def get_fields(self, *fields: str) -> dict[str, str]: - res = {} - for field in fields: - if (value := self.get_field(field)) is not None: - res[field] = value - return res - - @override - @ensure_leader_for_app - def write_field(self, field: str, value: Any) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if not value: - return None - self.relation.data[self.component].update({field: value}) - - @override - @ensure_leader_for_app - def write_fields(self, mapping: dict[str, Any]) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - (self.write_field(field, value) for field, value in mapping.items()) - - @override - @ensure_leader_for_app - def write_secret_field( - self, field: str, value: Any, 
secret_group: SecretGroup - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = self.get_field(self.secret_field(secret_group, field)) - - secret = self.secrets.get(label=label, uri=secret_uri) - if not secret: - return self.add_secret(field, value, secret_group) - else: - content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({field: value}) - secret.set_content(full_content) - return secret - - @override - @ensure_leader_for_app - def delete_field(self, field: str) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - try: - relation_data.pop(field) - except KeyError: - logger.debug( - f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" - ) - - @override - @ensure_leader_for_app - def delete_fields(self, *fields: str) -> None: - (self.delete_field(field) for field in fields) - - @override - @ensure_leader_for_app - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - relation_data = self.relation.data[self.component] - secret_field = self.secret_field(secret_group, field) - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = relation_data.get(secret_field) - - secret = 
self.secrets.get(label=label, uri=secret_uri) - - if not secret: - logging.error(f"Can't delete secret for relation {self.relation.id}") - return None - - content = secret.get_content() - new_content = copy.deepcopy(content) - try: - new_content.pop(field) - except KeyError: - logging.debug( - f"Non-existing secret '{field}' was attempted to be removed" - f"from relation {self.relation.id} and group {secret_group}" - ) - - # Write the new secret content if necessary - if new_content: - secret.set_content(new_content) - return - - # Remove the secret from the relation if it's fully gone. - try: - relation_data.pop(field) - except KeyError: - pass - self.secrets.remove(label) - return - - @ensure_leader_for_app - def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): - """Registers the secret group for this relation. - - [MAGIC HERE] - If we fetch a secret using get_secret(id=, label=), - then will be "stuck" on the Secret object, whenever it may - appear (i.e. as an event attribute, or fetched manually) on future occasions. - - This will allow us to uniquely identify the secret on Provider side (typically on - 'secret-changed' events), and map it to the corresponding relation. 
- """ - if not self.relation: - raise ValueError("Cannot register without relation.") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - CachedSecret(self.model, self.component, label, uri).meta - - @override - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - return self.secrets.get(label=label, uri=secret_uri) - - @override - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - uri: str | None = None, - short_uuid: str | None = None, - ) -> Any | None: - """Gets a value for a field stored in a secret group.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - secret_field = self.secret_field(secret_group, field) - - relation_data = self.relation.data[self.component] - secret_uri = uri or relation_data.get(secret_field) - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - if self.uri_to_databag and not secret_uri: - logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - secret = self.secrets.get(label=label, uri=secret_uri) - - if not secret: - 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - content = secret.get_content().get(field) - - if not content: - return - - try: - return json.loads(content) - except json.JSONDecodeError: - return content - - @override - @ensure_leader_for_app - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid) - - secret = self.secrets.add(label, {field: value}, self.relation) - - if not secret.meta or not secret.meta.id: - logging.error("Secret is missing Secret ID") - raise SecretError("Secret added but is missing Secret ID") - - return secret - - @override - @ensure_leader_for_app - def delete_secret(self, label: str) -> None: - self.secrets.remove(label) - - -@final -class OpsRelationRepository(OpsRepository): - """Implementation of the Abstract Repository for non peer relations.""" - - SECRET_FIELD_NAME: str = "secret" - - @override - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None - ) -> str: - """Generate unique group_mappings for secrets within a relation context.""" - if short_uuid: - return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" - return f"{relation.name}.{relation.id}.{secret_group}.secret" - - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Generates the field name to store in the peer relation.""" - return f"{self.SECRET_FIELD_NAME}-{secret_group}" - - -class 
class OpsPeerRepository(OpsRepository):
    """Implementation of the Ops Repository for peer relations."""

    SECRET_FIELD_NAME = "internal_secret"

    # Relation-scoped groups make no sense on a peer relation and are refused.
    IGNORES_GROUPS = [
        SecretGroup("user"),
        SecretGroup("entity"),
        SecretGroup("mtls"),
        SecretGroup("tls"),
    ]

    # Peer secrets are addressed by label only; URIs are not mirrored in the databag.
    uri_to_databag: bool = False

    @property
    def scope(self) -> Scope:
        """Returns a scope.

        Raises:
            ValueError: if the component is neither a Unit nor an Application.
        """
        if isinstance(self.component, Application):
            return Scope.APP
        if isinstance(self.component, Unit):
            return Scope.UNIT
        raise ValueError("Invalid component, neither a Unit nor an Application")

    @override
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group_mappings for secrets within a relation context."""
        members = [relation.name, self._local_app.name, self.scope.value]

        # The "extra" group is the default and is left out of the label.
        if secret_group != SecretGroup("extra"):
            members.append(secret_group)
        return f"{'.'.join(members)}"

    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Generates the field name to store in the peer relation.

        Raises:
            ValueError: if no field is provided (peer secrets are keyed per field).
        """
        if not field:
            raise ValueError("Must have a field.")
        return f"{field}@{secret_group}"


@final
class OpsPeerUnitRepository(OpsPeerRepository):
    """Implementation for a unit."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        super().__init__(model, relation, component)


@final
class OpsOtherPeerUnitRepository(OpsPeerRepository):
    """Implementation for a remote unit.

    Read-only view: all mutating operations raise NotImplementedError.
    """

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        if component == model.unit:
            raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.")
        super().__init__(model, relation, component)

    @override
    def write_field(self, field: str, value: Any) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def write_fields(self, mapping: dict[str, Any]) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def add_secret(
        self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> CachedSecret | None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_field(self, field: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_fields(self, *fields: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")


TRepository = TypeVar("TRepository", bound=OpsRepository)
TCommon = TypeVar("TCommon", bound=BaseModel)
TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel)
TCommonBis = TypeVar("TCommonBis", bound=BaseModel)


class RepositoryInterface(Generic[TRepository, TCommon]):
    """Repository builder.

    Binds a charm, a relation name, a component and a repository class together,
    and builds repositories/models per relation id.
    """

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        component: Unit | Application,
        repository_type: type[TRepository],
        model: type[TCommon] | TypeAdapter | None,
    ):
        self.charm = charm
        self._model = charm.model
        self.repository_type = repository_type
        self.relation_name = relation_name
        self.model = model
        self.component = component

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this relation name."""
        return self._model.relations[self.relation_name]

    def repository(
        self, relation_id: int, component: Unit | Application | None = None
    ) -> TRepository:
        """Returns a repository for the relation.

        Raises:
            ValueError: if no relation with this id exists.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return self.repository_type(self._model, relation, component or self.component)

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommon],
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: TypeAdapter[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: None = None,
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    def build_model(
        self,
        relation_id: int,
        model: type[TCommon] | TypeAdapter[TCommonBis] | None = None,
        component: Unit | Application | None = None,
    ) -> TCommon | TCommonBis:
        """Builds a model using the repository for that relation.

        Raises:
            ValueError: if no model is available or the relation is missing.
        """
        model = model or self.model  # First the provided model (allows for specialisation)
        component = component or self.component
        if not model:
            raise ValueError("Missing model to specialise data")
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return build_model(self.repository_type(self._model, relation, component), model)

    def write_model(
        self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None
    ):
        """Writes the model using the repository.

        Raises:
            ValueError: if the relation is missing.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")

        write_model(
            self.repository_type(self._model, relation, self.component), model, context=context
        )


class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]):
    """Specialised Interface to build repositories for (non-peer) client relations, app-scoped."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model)


class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]):
    """Specialised Interface to build repositories for app peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model)


class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]):
    """Specialised Interface to build repositories for this unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model)


class OpsOtherPeerUnitRepositoryInterface(
    RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon]
):
    """Specialised Interface to build repositories for another unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        unit: Unit,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model)


##############################################################################
# DDD implementation methods
##############################################################################
##############################################################################


def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon:
    """Builds a common model using the provided repository and provided model structure."""
    data = repository.get_data() or {}

    # Drop the legacy nested "data" mirror before validation.
    data.pop("data", None)

    # Beware this means all fields should have a default value here.
    if isinstance(model, TypeAdapter):
        return model.validate_python(data, context={"repository": repository})

    return model.model_validate(data, context={"repository": repository})


def write_model(
    repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None
):
    """Writes the data stored in the model using the repository object."""
    context = context or {}
    # Secret fields are written to Juju secrets as a side effect of model_dump
    # (see the models' wrap serializers); only plain fields reach the databag.
    dumped = model.model_dump(
        mode="json", context={"repository": repository} | context, exclude_none=False
    )
    for field, value in dumped.items():
        if value is None:
            # None means "remove the key from the databag".
            repository.delete_field(field)
            continue
        dumped_value = value if isinstance(value, str) else json.dumps(value)
        repository.write_field(field, dumped_value)
- snapshot["request"] = pickle.dumps(self.request) - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.request = pickle.loads(snapshot["request"]) - - -class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource requested event.""" - - pass - - -class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource Entity requested event.""" - - pass - - -class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - pass - - -class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - request: TRequirerCommonModel, - old_mtls_cert: str | None = None, - ): - super().__init__(handle, relation, app, unit, request) - - self.old_mtls_cert = old_mtls_cert - - def snapshot(self): - """Return a snapshot of the event.""" - return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} - - def restore(self, snapshot): - """Restore the event from a snapshot.""" - super().restore(snapshot) - self.old_mtls_cert = snapshot["old_mtls_cert"] - - -class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): - """Resource requested event. - - Contains the request that should be handled. 
- - fields to serialize: relation, app, unit, request - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - requests: list[TRequirerCommonModel], - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.requests = requests - - def snapshot(self) -> dict[str, Any]: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. - snapshot["requests"] = [pickle.dumps(request) for request in self.requests] - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.requests = [pickle.loads(request) for request in snapshot["requests"]] - - -class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) - resource_requested = EventSource(ResourceRequestedEvent) - resource_entity_requested = EventSource(ResourceEntityRequestedEvent) - resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) - mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) - - -class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): - """Resource created/changed event. - - Contains the request that should be handled. - - fields to serialize: relation, app, unit, response - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - response: TResourceProviderModel, - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.response = response - - def snapshot(self) -> dict: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. 
- snapshot["response"] = pickle.dumps(self.response) - return snapshot - - def restore(self, snapshot: dict): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - - self.response = pickle.loads(snapshot["response"]) - - -class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource has been created.""" - - pass - - -class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource entity has been created.""" - - pass - - -class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read/Write enpoints are changed.""" - - pass - - -class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read-only enpoints are changed.""" - - pass - - -class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Authentication was updated for a user.""" - - pass - - -class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - resource_created = EventSource(ResourceCreatedEvent) - resource_entity_created = EventSource(ResourceEntityCreatedEvent) - endpoints_changed = EventSource(ResourceEndpointsChangedEvent) - read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) - authentication_updated = EventSource(AuthenticationUpdatedEvent) - - -############################################################################## -# Event Handlers -############################################################################## - - -class EventHandlers(Object): - """Requires-side of the relation.""" - - component: Application | Unit - interface: RepositoryInterface - - def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): - """Manager of base client relations.""" - if not unique_key: - unique_key = relation_name - super().__init__(charm, unique_key) - - self.charm = charm - self.relation_name = relation_name - - self.framework.observe( - charm.on[self.relation_name].relation_changed, - self._on_relation_changed_event, - ) - - self.framework.observe( - self.charm.on[self.relation_name].relation_created, - self._on_relation_created_event, - ) - - self.framework.observe( - charm.on.secret_changed, - self._on_secret_changed_event, - ) - - @property - def relations(self) -> list[Relation]: - """Shortcut to get access to the relations.""" - return self.interface.relations - - # Event handlers - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the relation is created.""" - pass - - @abstractmethod - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _handle_event( - self, - ): - """Handles 
the event and reacts accordingly.""" - pass - - def compute_diff( - self, - relation: Relation, - request: RequirerCommonModel | ResourceProviderModel, - repository: AbstractRepository | None = None, - store: bool = True, - ) -> Diff: - """Computes, stores and returns a diff for that request.""" - if not repository: - repository = OpsRelationRepository(self.model, relation, component=relation.app) - - # Gets the data stored in the databag for diff computation - old_data = get_encoded_dict(relation, self.component, "data") - - # In case we're V1, we select specifically this request - if old_data and request.request_id: - old_data: dict | None = old_data.get(request.request_id, None) - - # dump the data of the current request so we can compare - new_data = request.model_dump( - mode="json", - exclude={"data"}, - exclude_none=True, - exclude_defaults=True, - ) - - # Computes the diff - _diff = diff(old_data, new_data) - - if store: - # Update the databag with the new data for later diff computations - store_new_data(relation, self.component, new_data, short_uuid=request.request_id) - - return _diff - - def _relation_from_secret_label(self, secret_label: str) -> Relation | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 3): - return - - try: - relation_id = int(contents[1]) - except ValueError: - return - - relation_name = contents[0] - - try: - return self.model.get_relation(relation_name, relation_id) - except ModelError: - return - - def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 5): - return - - return contents[2] - - -class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): - """Event Handler for resource provider.""" - - on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - request_model: type[TRequirerCommonModel], - unique_key: str = "", - mtls_enabled: bool = False, - bulk_event: bool = False, - ): - """Builds a resource provider event handler. - - Args: - charm: The charm. - relation_name: The relation name this event handler is listening to. - request_model: The request model that is expected to be received. - unique_key: An optional unique key for that object. - mtls_enabled: If True, means the server supports MTLS integration. - bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. - """ - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.app - self.request_model = request_model - self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) - self.mtls_enabled = mtls_enabled - self.bulk_event = bulk_event - - @staticmethod - def _validate_diff(event: RelationEvent, _diff: Diff) -> None: - """Validates that entity information is not changed after relation is established. - - - When entity-type changes, backwards compatibility is broken. - - When extra-user-roles changes, role membership checks become incredibly complex. - - When extra-group-roles changes, role membership checks become incredibly complex. 
- """ - if not isinstance(event, RelationChangedEvent): - return - - for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: - if key in _diff.changed: - raise ValueError(f"Cannot change {key} after relation has already been created") - - def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): - if self.mtls_enabled and "secret-mtls" in _diff.added: - getattr(self.on, "mtls_cert_updated").emit( - event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None - ) - return - # Emit a resource requested event if the setup key (resource name) - # was added to the relation databag, but the entity-type key was not. - if resource_added(_diff) and "entity-type" not in _diff.added: - getattr(self.on, "resource_requested").emit( - event.relation, - app=event.app, - unit=event.unit, - request=request, - ) - # To avoid unnecessary application restarts do not trigger other events. - return - - # Emit an entity requested event if the setup key (resource name) - # was added to the relation databag, in addition to the entity-type key. - if resource_added(_diff) and "entity-type" in _diff.added: - getattr(self.on, "resource_entity_requested").emit( - event.relation, - app=event.app, - unit=event.unit, - request=request, - ) - # To avoid unnecessary application restarts do not trigger other events. - return - - # Emit a permissions changed event if the setup key (resource name) - # was added to the relation databag, and the entity-permissions key changed. - if ( - not resource_added(_diff) - and "entity-type" not in _diff.added - and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) - ): - getattr(self.on, "resource_entity_permissions_changed").emit( - event.relation, app=event.app, unit=event.unit, request=request - ) - # To avoid unnecessary application restarts do not trigger other events. 
- return - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request: RequirerCommonModel, - ): - _diff = self.compute_diff(event.relation, request, repository) - - self._validate_diff(event, _diff) - self._dispatch_events(event, _diff, request) - - def _handle_bulk_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request_model: RequirerDataContractV1[TRequirerCommonModel], - ): - """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. - - This allows for the developer to process the diff and store it themselves - """ - for request in request_model.requests: - # Compute the diff without storing it so we can validate the diffs. - _diff = self.compute_diff(event.relation, request, repository, store=False) - self._validate_diff(event, _diff) - - getattr(self.on, "bulk_resources_requested").emit( - event.relation, app=event.app, unit=event.unit, requests=request_model.requests - ) - - # Store all the diffs if they were not already stored. 
- for request in request_model.requests: - new_data = request.model_dump( - mode="json", - exclude={"data"}, - context={"repository": repository}, - exclude_none=True, - exclude_defaults=True, - ) - store_new_data(event.relation, self.component, new_data, request.request_id) - - @override - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - if not self.mtls_enabled: - logger.info("MTLS is disabled, exiting early.") - return - if not event.secret.label: - return - - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = unit - break - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - old_mtls_cert = event.secret.get_content().get("mtls-cert") - logger.info("mtls-cert-updated") - - # V0, just fire the event. - if version == "v0": - request = build_model(repository, RequirerDataContractV0) - # V1, find the corresponding request. 
- else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if not short_uuid: - return - for _request in request_model.requests: - if _request.request_id == short_uuid: - request = _request - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "mtls_cert_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - request=request, - mtls_cert=old_mtls_cert, - ) - - @override - def _on_relation_changed_event(self, event: RelationChangedEvent): - if not self.charm.unit.is_leader(): - return - - repository = OpsRelationRepository( - self.model, event.relation, component=event.relation.app - ) - - # Don't do anything until we get some data - if not repository.get_data(): - return - - version = repository.get_field("version") or "v0" - if version == "v0": - request_model = build_model(repository, RequirerDataContractV0) - old_name = request_model.original_field - request_model.request_id = None # For safety, let's ensure that we don't have a model. - self._handle_event(event, repository, request_model) - logger.info( - f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" - ) - self.interface.repository( - event.relation.id, - ).write_field(old_name, request_model.resource) - else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if self.bulk_event: - self._handle_bulk_event(event, repository, request_model) - return - for request in request_model.requests: - self._handle_event(event, repository, request) - - def set_response(self, relation_id: int, response: ResourceProviderModel): - r"""Sets a response in the databag. - - This function will react accordingly to the version number. - If the version number is v0, then we write the data directly in the databag. - If the version number is v1, then we write the data in the list of responses. - - /!\ This function updates a response if it was already present in the databag! 
- - Args: - relation_id: The specific relation id for that event. - response: The response to write in the databag. - """ - if not self.charm.unit.is_leader(): - return - - relation = self.charm.model.get_relation(self.relation_name, relation_id) - - if not relation: - raise ValueError("Missing relation.") - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - if version == "v0": - # Ensure the request_id is None - response.request_id = None - self.interface.write_model( - relation_id, response, context={"version": "v0"} - ) # {"database": "database-name", "secret-user": "uri", ...} - return - - model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) - - # for/else syntax allows to execute the else if break was not called. - # This allows us to update or append easily. - for index, _response in enumerate(model.requests): - if _response.request_id == response.request_id: - model.requests[index] = response - break - else: - model.requests.append(response) - - self.interface.write_model(relation_id, model) - return - - -class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): - """Event Handler for resource requirer.""" - - on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - requests: list[RequirerCommonModel], - response_model: type[TResourceProviderModel], - unique_key: str = "", - relation_aliases: list[str] | None = None, - ): - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.unit - self.relation_aliases = relation_aliases - self._requests = requests - self.response_model = DataContractV1[response_model] - self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( - OpsRelationRepositoryInterface(charm, relation_name, self.response_model) - ) - - if 
requests: - self._request_model = requests[0].__class__ - else: - self._request_model = RequirerCommonModel - - # First, check that the number of aliases matches the one defined in charm metadata. - if self.relation_aliases: - relation_connection_limit = self.charm.meta.requires[relation_name].limit - if len(self.relation_aliases) != relation_connection_limit: - raise ValueError( - f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" - ) - - # Created custom event names for each alias. - if self.relation_aliases: - for relation_alias in self.relation_aliases: - self.on.define_event( - f"{relation_alias}_resource_created", - ResourceCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_resource_entity_created", - ResourceEntityCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_endpoints_changed", - ResourceEndpointsChangedEvent, - ) - self.on.define_event( - f"{relation_alias}_read_only_endpoints_changed", - ResourceReadOnlyEndpointsChangedEvent, - ) - - ############################################################################## - # Extra useful functions - ############################################################################## - def is_resource_created( - self, - rel_id: int, - request_id: str, - model: DataContractV1[TResourceProviderModel] | None = None, - ) -> bool: - """Checks if a resource has been created or not. - - Args: - rel_id: The relation id to check. - request_id: The specific request id to check. - model: An optional model to use (for performances). 
- """ - if not model: - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - for request in model.requests: - if request.request_id == request_id: - return request.secret_user is not None or request.secret_entity is not None - return False - - def are_all_resources_created(self, rel_id: int) -> bool: - """Checks that all resources have been created for a relation. - - Args: - rel_id: The relation id to check. - """ - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - return all( - self.is_resource_created(rel_id, request.request_id, model) - for request in model.requests - if request.request_id - ) - - @staticmethod - def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: - # Actual checking method. - # No need to check for psycopg here, it's been checked before. - if not psycopg2: - return False - - try: - with psycopg2.connect(connection_string) as connection: - with connection.cursor() as cursor: - cursor.execute( - "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) - ) - return cursor.fetchone() is not None - except psycopg2.Error as e: - logger.exception( - f"failed to check whether {plugin} plugin is enabled in the database: %s", - str(e), - ) - return False - - def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: - """Returns whether a plugin is enabled in the database. - - Args: - plugin: name of the plugin to check. - relation_index: Optional index to check the database (default: 0 - first relation). - """ - if not psycopg2: - return False - - # Can't check a non existing relation. 
- if len(self.relations) <= relation_index: - return False - - relation = self.relations[relation_index] - model = self.interface.build_model(relation_id=relation.id, component=relation.app) - for request in model.requests: - if request.endpoints and request.username and request.password: - host = request.endpoints.split(":")[0] - username = request.username.get_secret_value() - password = request.password.get_secret_value() - - connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" - return self._is_pg_plugin_enabled(plugin, connection_string) - logger.info("No valid request to use to check for plugin.") - return False - - ############################################################################## - # Helpers for aliases - ############################################################################## - - def _assign_relation_alias(self, relation_id: int) -> None: - """Assigns an alias to a relation. - - This function writes in the unit data bag. - - Args: - relation_id: the identifier for a particular relation. - """ - # If no aliases were provided, return immediately. - if not self.relation_aliases: - return - - # Return if an alias was already assigned to this relation - # (like when there are more than one unit joining the relation). - relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation and relation.data[self.charm.unit].get("alias"): - return - - # Retrieve the available aliases (the ones that weren't assigned to any relation). - available_aliases = self.relation_aliases[:] - for relation in self.charm.model.relations[self.relation_name]: - alias = relation.data[self.charm.unit].get("alias") - if alias: - logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) - available_aliases.remove(alias) - - # Set the alias in the unit relation databag of the specific relation. 
- relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation: - relation.data[self.charm.unit].update({"alias": available_aliases[0]}) - - # We need to set relation alias also on the application level so, - # it will be accessible in show-unit juju command, executed for a consumer application unit - if relation and self.charm.unit.is_leader(): - relation.data[self.charm.app].update({"alias": available_aliases[0]}) - - def _emit_aliased_event( - self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel - ): - """Emit all aliased events.""" - alias = self._get_relation_alias(event.relation.id) - if alias: - getattr(self.on, f"{alias}_{event_name}").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - - def _get_relation_alias(self, relation_id: int) -> str | None: - """Gets the relation alias for a relation id.""" - for relation in self.charm.model.relations[self.relation_name]: - if relation.id == relation_id: - return relation.data[self.charm.unit].get("alias") - return None - - ############################################################################## - # Event Handlers - ############################################################################## - - def _on_secret_changed_event(self, event: SecretChangedEvent): - """Event notifying about a new value of a secret.""" - if not event.secret.label: - return - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = 
unit - break - - response_model = self.interface.build_model(relation.id) - if not short_uuid: - return - for _response in response_model.requests: - if _response.request_id == short_uuid: - response = _response - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "authentication_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - response=response, - ) - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the database relation is created.""" - super()._on_relation_created_event(event) - - repository = OpsRelationRepository(self.model, event.relation, self.charm.app) - - # If relations aliases were provided, assign one to the relation. - self._assign_relation_alias(event.relation.id) - - if not self.charm.unit.is_leader(): - return - - # Generate all requests id so they are saved already. - for request in self._requests: - request.request_id = gen_hash(request.resource, request.salt) - - full_request = RequirerDataContractV1[self._request_model]( - version="v1", requests=self._requests - ) - write_model(repository, full_request) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the database relation has changed.""" - is_subordinate = False - remote_unit_data = None - for key in event.relation.data.keys(): - if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): - remote_unit_data = event.relation.data[key] - elif isinstance(key, Application) and key.name != self.charm.app.name: - is_subordinate = event.relation.data[key].get("subordinated") == "true" - - if is_subordinate: - if not remote_unit_data or remote_unit_data.get("state") != "ready": - return - - repository = self.interface.repository(event.relation.id, event.app) - response_model = self.interface.build_model(event.relation.id, component=event.app) - - if not response_model.requests: - logger.info("Still waiting for data.") - 
return - - data = repository.get_field("data") - if not data: - logger.info("Missing data to compute diffs") - return - - request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) - - for response in response_model.requests: - response_id = response.request_id or gen_hash(response.resource, response.salt) - request = request_map.get(response_id, None) - if not request: - raise ValueError( - f"No request matching the response with response_id {response_id}" - ) - self._handle_event(event, repository, request, response) - - ############################################################################## - # Methods to handle specificities of relation events - ############################################################################## - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: OpsRelationRepository, - request: RequirerCommonModel, - response: ResourceProviderModel, - ): - _diff = self.compute_diff(event.relation, response, repository, store=True) - - for newval in _diff.added: - if secret_group := response._get_secret_field(newval): - uri = getattr(response, newval.replace("-", "_")) - repository.register_secret(uri, secret_group, response.request_id) - - if "secret-user" in _diff.added and not request.entity_type: - logger.info(f"resource {response.resource} created at {datetime.now()}") - getattr(self.on, "resource_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_created", response) - return - - if "secret-entity" in _diff.added and request.entity_type: - logger.info(f"entity {response.entity_name} created at {datetime.now()}") - getattr(self.on, "resource_entity_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_entity_created", response) - return - - if "endpoints" in _diff.added or "endpoints" in _diff.changed: - logger.info(f"endpoints 
changed at {datetime.now()}") - getattr(self.on, "endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "endpoints_changed", response) - return - - if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: - logger.info(f"read-only-endpoints changed at {datetime.now()}") - getattr(self.on, "read_only_endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "read_only_endpoints_changed", response) - return diff --git a/tests/v1/integration/application-charm/src/charm.py b/tests/v1/integration/application-charm/src/charm.py index b7167b7a..3d122b8b 100755 --- a/tests/v1/integration/application-charm/src/charm.py +++ b/tests/v1/integration/application-charm/src/charm.py @@ -16,6 +16,7 @@ from ops.main import main from ops.model import ActiveStatus from pydantic import Field, SecretStr +from pydantic.types import _SecretBase from charms.data_platform_libs.v1.data_interfaces import ( ExtraSecretStr, @@ -66,7 +67,7 @@ def __init__(self, *args): requests=[ RequirerCommonModel(resource=database_name, extra_user_roles=EXTRA_USER_ROLES) ], - response_model=ResourceProviderModel, + response_model=ExtendedResponseModel, ) self.first_database_roles = ResourceRequirerEventHandler( self, @@ -76,7 +77,7 @@ def __init__(self, *args): resource=database_name, entity_type="USER", extra_user_roles=EXTRA_USER_ROLES ) ], - response_model=ResourceProviderModel, + response_model=ExtendedResponseModel, ) self.framework.observe( self.first_database.on.resource_created, self._on_first_database_created @@ -325,9 +326,11 @@ def _on_start(self, _) -> None: def _on_get_relation_field(self, event: ActionEvent): """Get requested relation field (OTHER side).""" source, relation = self._get_relation(event.params["relation_id"]) - value = source.interface.repository(relation.id, relation.app).get_field( - event.params["field"] 
- ) + value = None + model = source.interface.build_model(relation.id, component=relation.app) + for request in model.requests: + value = getattr(request, event.params["field"].replace("-", "_")) + value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_get_relation_self_side_field(self, event: ActionEvent): diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index d152d3e0..9217cd70 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -170,14 +170,18 @@ def _on_change_admin_password(self, event: ActionEvent): def _on_set_secret_action(self, event: ActionEvent): """Change the admin password.""" secret_field: str | None = event.params.get("field") - if not secret_field: + rel_id = event.params.get("relation_id") + if not secret_field or not rel_id: event.fail("Invalid empty field.") return password = event.params.get("value", self._new_password()) for relation in self.database.interface.relations: - model = self.database.interface.build_model(relation.id, DataContract) - for request in model.requests: - setattr(request, secret_field, password) + if relation.id == rel_id: + break + model = self.database.interface.build_model(relation.id, DataContract) + for request in model.requests: + setattr(request, secret_field, password) + self.database.interface.write_model(relation.id, model) def _on_database_pebble_ready(self, event: WorkloadEvent) -> None: """Define and start the database using the Pebble API.""" @@ -314,7 +318,7 @@ def _on_get_relation_field(self, event: ActionEvent): """[second_database]: Get requested relation field.""" relation = self._get_relation(event.params["relation_id"]) value = None - model = self.database.interface.build_model(relation.id) + model = self.database.interface.build_model(relation.id, DataContract) for request in 
model.requests: value = getattr(request, event.params["field"].replace("-", "_")) value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value @@ -324,7 +328,7 @@ def _on_get_relation_self_side_field(self, event: ActionEvent): """[second_database]: Get requested relation field.""" relation = self._get_relation(event.params["relation_id"]) value = None - model = self.database.interface.build_model(relation.id) + model = self.database.interface.build_model(relation.id, DataContract) for request in model.requests: value = getattr(request, event.params["field"].replace("-", "_")) value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value @@ -333,7 +337,7 @@ def _on_get_relation_self_side_field(self, event: ActionEvent): def _on_set_relation_field(self, event: ActionEvent): """Set requested relation field.""" relation = self._get_relation(event.params["relation_id"]) - model = self.database.interface.build_model(relation.id) + model = self.database.interface.build_model(relation.id, DataContract) for request in model.requests: setattr(request, event.params["field"].replace("-", "_"), event.params["value"]) self.database.interface.write_model(relation.id, model) @@ -341,7 +345,7 @@ def _on_set_relation_field(self, event: ActionEvent): def _on_delete_relation_field(self, event: ActionEvent): """Delete requested relation field.""" relation = self._get_relation(event.params["relation_id"]) - model = self.database.interface.build_model(relation.id) + model = self.database.interface.build_model(relation.id, DataContract) for request in model.requests: setattr(request, event.params["field"].replace("-", "_"), None) # Charms should be compatible with old vesrions, to simulatrams["field"]) @@ -454,7 +458,7 @@ def _on_get_other_peer_relation_field(self, event: ActionEvent): event.fail("Missing relation") return for unit, interface in self.peer_units_data_interfaces.items(): - model = interface.build_model(relation.id) + 
model = interface.build_model(relation.id, DataContract) value[unit.name.replace("/", "-")] = getattr( model, event.params["field"].replace("-", "_") ) diff --git a/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py deleted file mode 100644 index e22388d1..00000000 --- a/tests/v1/integration/dummy-database-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +++ /dev/null @@ -1,2753 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -r"""Library to manage the relation for the data-platform products. - -This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. - -This library contains the Requires and Provides classes for handling the relation -between an application and multiple managed application supported by the data-team: -MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. - -#### Models - -This library exposes basic default models that can be used in most cases. -If you need more complex models, you can subclass them. - -```python -from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr - -class ExtendedCommonModel(RequirerCommonModel): - operator_password: ExtraSecretStr -``` - -Secret groups are handled using annotated types. 
If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` - -```python -MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] -``` - -Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. - - -#### Requirer Charm - -This library is a uniform interface to a selection of common database -metadata, with added custom events that add convenience to database management, -and methods to consume the application related data. - - -```python -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ClientCharm(CharmBase): - # Database charm that accepts connections from application charms. - def __init__(self, *args) -> None: - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - RequirerCommonModel( - entity_type="USER", - ) - ] - self.database = ResourceRequirerEventHandler( - self,"database", requests, response_model=ResourceProviderModel - ) - self.framework.observe(self.database.on.resource_created, self._on_resource_created) - self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) - - def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new database is created. - relation_id = event.relation.id - response = event.response # This is the response model - - username = event.response.username - password = event.response.password - ... 
- - def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new entity is created. - ... - -Compared to V1, this library makes heavy use of pydantic models, and allows for -multiple requests, specified as a list. -On the Requirer side, each response will trigger one custom event for that response. -This way, it allows for more strategic events to be emitted according to the request. - -As show above, the library provides some custom events to handle specific situations, which are listed below: -- resource_created: event emitted when the requested database is created. -- resource_entity_created: event emitted when the requested entity is created. -- endpoints_changed: event emitted when the read/write endpoints of the database have changed. -- read_only_endpoints_changed: event emitted when the read-only endpoints of the database - have changed. Event is not triggered if read/write endpoints changed too. - -If it is needed to connect multiple database clusters to the same relation endpoint -the application charm can implement the same code as if it would connect to only -one database cluster (like the above code example). - -To differentiate multiple clusters connected to the same relation endpoint -the application charm can use the name of the remote application: - -```python - -def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Get the remote app name of the cluster that triggered this event - cluster = event.relation.app.name -``` - -It is also possible to provide an alias for each different database cluster/relation. - -So, it is possible to differentiate the clusters in two ways. -The first is to use the remote application name, i.e., `event.relation.app.name`, as above. - -The second way is to use different event handlers to handle each cluster events. 
-The implementation would be something like the following code: - -```python - -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ApplicationCharm(CharmBase): - # Application charm that connects to database charms. - - def __init__(self, *args): - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - ] - # Define the cluster aliases and one handler for each cluster database created event. - self.database = ResourceRequirerEventHandler( - self, - relation_name="database" - relations_aliases = ["cluster1", "cluster2"], - requests= - ) - self.framework.observe( - self.database.on.cluster1_resource_created, self._on_cluster1_resource_created - ) - self.framework.observe( - self.database.on.cluster2_resource_created, self._on_cluster2_resource_created - ) - - def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster1 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... - - def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster2 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... 
-``` - -### Provider Charm - -Following an example of using the ResourceRequestedEvent, in the context of the -database charm code: - -```python -from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides - -class SampleCharm(CharmBase): - - def __init__(self, *args): - super().__init__(*args) - # Charm events defined in the database provides charm library. - self.provided_database = DatabaseProvides(self, relation_name="database") - self.framework.observe(self.provided_database.on.database_requested, - self._on_database_requested) - # Database generic helper - self.database = DatabaseHelper() - - def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: - # Handle the event triggered by a new database requested in the relation - # Retrieve the database name using the charm library. - db_name = event.database - # generate a new user credential - username = self.database.generate_user() - password = self.database.generate_password() - # set the credentials for the relation - self.provided_database.set_credentials(event.relation.id, username, password) - # set other variables for the relation event.set_tls("False") -``` - -As shown above, the library provides a custom event (database_requested) to handle -the situation when an application charm requests a new database to be created. -It's preferred to subscribe to this event instead of relation changed event to avoid -creating a new database when other information other than a database name is -exchanged in the relation databag. 
- -""" - -import copy -import hashlib -import json -import logging -import pickle -import random -import string -from abc import ABC, abstractmethod -from collections import namedtuple -from datetime import datetime -from enum import Enum -from typing import ( - Annotated, - Any, - ClassVar, - Generic, - Literal, - NewType, - TypeAlias, - TypeVar, - final, - overload, -) - -from ops import ( - CharmBase, - EventBase, - Model, - RelationChangedEvent, - RelationCreatedEvent, - RelationEvent, - Secret, - SecretChangedEvent, - SecretInfo, - SecretNotFoundError, -) -from ops.charm import CharmEvents -from ops.framework import EventSource, Handle, Object -from ops.model import Application, ModelError, Relation, Unit -from pydantic import ( - AfterValidator, - AliasChoices, - BaseModel, - ConfigDict, - Discriminator, - Field, - SecretStr, - SerializationInfo, - SerializerFunctionWrapHandler, - Tag, - TypeAdapter, - ValidationInfo, - model_serializer, - model_validator, -) -from pydantic.types import _SecretBase, _SecretField -from pydantic_core import CoreSchema, core_schema -from typing_extensions import TypeAliasType, override - -try: - import psycopg2 -except ImportError: - psycopg2 = None - -# The unique Charmhub library identifier, never change it -LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" - -# Increment this major API version when introducing breaking changes -LIBAPI = 1 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 0 - -PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] - -logger = logging.getLogger(__name__) - -MODEL_ERRORS = { - "not_leader": "this unit is not the leader", - "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", - "owner_no_refresh": "ERROR secret owner cannot use --refresh", -} - -RESOURCE_ALIASES = [ - "database", - "subject", - "topic", - "index", - "plugin-url", -] - -SECRET_PREFIX = "secret-" - - 
-############################################################################## -# Exceptions -############################################################################## - - -class DataInterfacesError(Exception): - """Common ancestor for DataInterfaces related exceptions.""" - - -class SecretError(DataInterfacesError): - """Common ancestor for Secrets related exceptions.""" - - -class SecretAlreadyExistsError(SecretError): - """A secret that was to be added already exists.""" - - -class SecretsUnavailableError(SecretError): - """Secrets aren't yet available for Juju version used.""" - - -class IllegalOperationError(DataInterfacesError): - """To be used when an operation is not allowed to be performed.""" - - -############################################################################## -# Global helpers / utilities -############################################################################## - - -def gen_salt() -> str: - """Generates a consistent salt.""" - return "".join(random.choices(string.ascii_letters + string.digits, k=16)) - - -def gen_hash(resource_name: str, salt: str) -> str: - """Generates a consistent hash based on the resource name and salt.""" - hasher = hashlib.sha256() - hasher.update(f"{resource_name}:{salt}".encode()) - return hasher.hexdigest()[:16] - - -def ensure_leader_for_app(f): - """Decorator to ensure that only leader can perform given operation.""" - - def wrapper(self, *args, **kwargs): - if self.component == self._local_app and not self._local_unit.is_leader(): - logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") - return - return f(self, *args, **kwargs) - - wrapper.leader_only = True - return wrapper - - -def get_encoded_dict( - relation: Relation, member: Unit | Application, field: str -) -> dict[str, Any] | None: - """Retrieve and decode an encoded field from relation data.""" - data = json.loads(relation.data[member].get(field, "{}")) - if isinstance(data, dict): - return data - 
logger.error("Unexpected datatype for %s instead of dict.", str(data)) - - -Diff = namedtuple("Diff", ["added", "changed", "deleted"]) -Diff.__doc__ = """ -A tuple for storing the diff between two data mappings. - -added - keys that were added -changed - keys that still exist but have new values -deleted - key that were deleted""" - - -def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: - """Retrieves the diff of the data in the relation changed databag for v1. - - Args: - old_data: dictionary of the stored data before the event. - new_data: dictionary of the received data to compute the diff. - - Returns: - a Diff instance containing the added, deleted and changed - keys from the event relation databag. - """ - old_data = old_data or {} - - # These are the keys that were added to the databag and triggered this event. - added = new_data.keys() - old_data.keys() - # These are the keys that were removed from the databag and triggered this event. - deleted = old_data.keys() - new_data.keys() - # These are the keys that already existed in the databag, - # but had their values changed. - changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} - # Return the diff with all possible changes. 
- return Diff(added, changed, deleted) - - -def resource_added(diff: Diff) -> bool: - """Ensures that one of the aliased resources has been added.""" - return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) - - -def store_new_data( - relation: Relation, - component: Unit | Application, - new_data: dict[str, str], - short_uuid: str | None = None, -): - """Stores the new data in the databag for diff computation.""" - # First, the case for V0 - if not short_uuid: - relation.data[component].update({"data": json.dumps(new_data)}) - # Then the case for V1, where we have a ShortUUID - else: - data = json.loads(relation.data[component].get("data", "{}")) - if not isinstance(data, dict): - raise ValueError - newest_data = copy.deepcopy(data) - newest_data[short_uuid] = new_data - relation.data[component].update({"data": json.dumps(newest_data)}) - - -############################################################################## -# Helper classes -############################################################################## - -SecretGroup = NewType("SecretGroup", str) - - -SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) - - -class SecretBool(_SecretField[bool]): - """Class for booleans as secrets.""" - - _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() - _error_kind: ClassVar[str] = "bool_type" - - def _display(self) -> str: - return "****" - - -OptionalSecretStr: TypeAlias = SecretStr | None -OptionalSecretBool: TypeAlias = SecretBool | None - -OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) - -UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] -TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] -TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] -MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] -ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] -EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] - - -class Scope(Enum): - """Peer relations scope.""" - - APP = "app" - UNIT = "unit" - - -class CachedSecret: - """Locally cache a secret. - - The data structure is precisely reusing/simulating as in the actual Secret Storage - """ - - KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] - - def __init__( - self, - model: Model, - component: Application | Unit, - label: str, - secret_uri: str | None = None, - ): - self._secret_meta = None - self._secret_content = {} - self._secret_uri = secret_uri - self.label = label - self._model = model - self.component = component - self.current_label = None - - @property - def meta(self) -> Secret | None: - """Getting cached secret meta-information.""" - if not self._secret_meta: - if not (self._secret_uri or self.label): - return - - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - # Falling back to seeking for potential legacy labels - logger.info(f"Secret with label {self.label} not found") - - # If still not found, to be checked by URI, to be labelled with the proposed label - if not self._secret_meta and self._secret_uri: - self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) - return self._secret_meta - - ########################################################################## - # Public functions - ########################################################################## - - def add_secret( - self, - content: dict[str, str], - relation: Relation | None = None, - label: str | None = None, - ) -> Secret: - """Create a new secret.""" - if self._secret_uri: - raise SecretAlreadyExistsError( - "Secret is already defined with uri %s", self._secret_uri - ) - - label = self.label if not label else label - - secret = self.component.add_secret(content, label=label) - if 
relation and relation.app != self._model.app: - # If it's not a peer relation, grant is to be applied - secret.grant(relation) - self._secret_uri = secret.id - self._secret_meta = secret - return self._secret_meta - - def get_content(self) -> dict[str, str]: - """Getting cached secret content.""" - if not self._secret_content: - if self.meta: - try: - self._secret_content = self.meta.get_content(refresh=True) - except (ValueError, ModelError) as err: - # https://bugs.launchpad.net/juju/+bug/2042596 - # Only triggered when 'refresh' is set - if isinstance(err, ModelError) and not any( - msg in str(err) for msg in self.KNOWN_MODEL_ERRORS - ): - raise - # Due to: ValueError: Secret owner cannot use refresh=True - self._secret_content = self.meta.get_content() - return self._secret_content - - def set_content(self, content: dict[str, str]) -> None: - """Setting cached secret content.""" - if not self.meta: - return - - if content == self.get_content(): - return - - if content: - self.meta.set_content(content) - self._secret_content = content - else: - self.meta.remove_all_revisions() - - def get_info(self) -> SecretInfo | None: - """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" - if self.meta: - return self.meta.get_info() - - def remove(self) -> None: - """Remove secret.""" - if not self.meta: - raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") - try: - self.meta.remove_all_revisions() - except SecretNotFoundError: - pass - self._secret_content = {} - self._secret_meta = None - self._secret_uri = None - - -class SecretCache: - """A data structure storing CachedSecret objects.""" - - def __init__(self, model: Model, component: Application | Unit): - self._model = model - self.component = component - self._secrets: dict[str, CachedSecret] = {} - - def get(self, label: str, uri: str | None = None) -> CachedSecret | None: - """Getting a secret from Juju Secret store or cache.""" - if 
not self._secrets.get(label): - secret = CachedSecret(self._model, self.component, label, uri) - if secret.meta: - self._secrets[label] = secret - return self._secrets.get(label) - - def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: - """Adding a secret to Juju Secret.""" - if self._secrets.get(label): - raise SecretAlreadyExistsError(f"Secret {label} already exists") - - secret = CachedSecret(self._model, self.component, label) - secret.add_secret(content, relation) - self._secrets[label] = secret - return self._secrets[label] - - def remove(self, label: str) -> None: - """Remove a secret from the cache.""" - if secret := self.get(label): - try: - secret.remove() - self._secrets.pop(label) - except (SecretsUnavailableError, KeyError): - pass - else: - return - logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) - - -############################################################################## -# Models classes -############################################################################## - - -class PeerModel(BaseModel): - """Common Model for all peer relations.""" - - model_config = ConfigDict( - validate_by_name=True, - validate_by_alias=True, - populate_by_name=True, - serialize_by_alias=True, - alias_generator=lambda x: x.replace("_", "-"), - extra="allow", - ) - - @model_validator(mode="after") - def extract_secrets(self, info: ValidationInfo): - """Extract all secret_fields into their local field.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing as we're lacking context here.") - return self - repository: AbstractRepository = info.context.get("repository") - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = SecretGroup(field_info.metadata[0]) - if not secret_group: - raise SecretsUnavailableError(field) - - 
aliased_field = field_info.serialization_alias or field - secret = repository.get_secret(secret_group, secret_uri=None) - - if not secret: - logger.info(f"No secret for group {secret_group}") - continue - - value = secret.get_content().get(aliased_field) - - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - value = SecretStr(value) - setattr(self, field, value) - - return self - - @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): - """Serializes the model writing the secrets in their respective secrets.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing serialization as we're lacking context here.") - return handler(self) - repository: AbstractRepository = info.context.get("repository") - - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = SecretGroup(field_info.metadata[0]) - if not secret_group: - raise SecretsUnavailableError(field) - - aliased_field = field_info.serialization_alias or field - secret = repository.get_secret(secret_group, secret_uri=None) - - value = getattr(self, field) - - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - - if secret is None: - if value: - secret = repository.add_secret( - aliased_field, - actual_value, - secret_group, - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - continue - - content = secret.get_content() - full_content = copy.deepcopy(content) - - if value is None: - full_content.pop(aliased_field, None) - else: - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - return handler(self) - - 
-class CommonModel(BaseModel): - """Common Model for both requirer and provider. - - request_id stores the request identifier for easier access. - resource is the requested resource. - """ - - model_config = ConfigDict( - validate_by_name=True, - validate_by_alias=True, - populate_by_name=True, - serialize_by_alias=True, - alias_generator=lambda x: x.replace("_", "-"), - extra="allow", - ) - - resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") - request_id: str | None = Field(default=None) - salt: str = Field( - description="This salt is used to create unique hashes even when other fields map 1-1", - default_factory=gen_salt, - ) - - @model_validator(mode="after") - def extract_secrets(self, info: ValidationInfo): - """Extract all secret_fields into their local field.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing as we're lacking context here.") - return self - repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = field_info.metadata[0] - if not secret_group: - raise SecretsUnavailableError(field) - - aliased_field = field_info.serialization_alias or field - secret_field = repository.secret_field(secret_group, aliased_field).replace( - "-", "_" - ) - secret_uri: str | None = getattr(self, secret_field, None) - - if not secret_uri: - continue - - secret = repository.get_secret( - secret_group, secret_uri=secret_uri, short_uuid=short_uuid - ) - - if not secret: - logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") - continue - - value = secret.get_content().get(aliased_field) - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - 
value = SecretStr(value) - - setattr(self, field, value) - return self - - @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): - """Serializes the model writing the secrets in their respective secrets.""" - _encountered_secrets: set[tuple[CachedSecret, str]] = set() - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing serialization as we're lacking context here.") - return handler(self) - repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) - # Backward compatibility for v0 regarding secrets. - if info.context.get("version") == "v0": - short_uuid = None - - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = field_info.metadata[0] - if not secret_group: - raise SecretsUnavailableError(field) - aliased_field = field_info.serialization_alias or field - secret_field = repository.secret_field(secret_group, aliased_field).replace( - "-", "_" - ) - secret_uri: str | None = getattr(self, secret_field, None) - secret = repository.get_secret( - secret_group, secret_uri=secret_uri, short_uuid=short_uuid - ) - - value = getattr(self, field) - - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - - if secret is None: - if value: - secret = repository.add_secret( - aliased_field, actual_value, secret_group, short_uuid - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - setattr(self, secret_field, secret.meta.id) - continue - - content = secret.get_content() - full_content = copy.deepcopy(content) - - if value is None: - full_content.pop(aliased_field, None) - 
_encountered_secrets.add((secret, secret_field)) - else: - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - - # Delete all empty secrets and clean up their fields. - for secret, secret_field in _encountered_secrets: - if not secret.get_content(): - # Setting a field to '' deletes it - setattr(self, secret_field, "") - repository.delete_secret(secret.label) - - return handler(self) - - @classmethod - def _get_secret_field(cls, field: str) -> SecretGroup | None: - """Checks if the field is a secret uri or not.""" - if not field.startswith(SECRET_PREFIX): - return None - - value = field.split("-")[1] - if info := cls.__pydantic_fields__.get(field.replace("-", "_")): - if info.annotation == SecretString: - return SecretGroup(value) - return None - - -class EntityPermissionModel(BaseModel): - """Entity Permissions Model.""" - - resource_name: str - resource_type: str - privileges: list - - -class RequirerCommonModel(CommonModel): - """Requirer side of the request model. - - extra_user_roles is used to request more roles for that user. - external_node_connectivity is used to indicate that the URI should be made for external clients when True - """ - - extra_user_roles: str | None = Field(default=None) - extra_group_roles: str | None = Field(default=None) - external_node_connectivity: bool = Field(default=False) - entity_type: Literal["USER", "GROUP"] | None = Field(default=None) - entity_permissions: list[EntityPermissionModel] | None = Field(default=None) - secret_mtls: SecretString | None = Field(default=None) - mtls_cert: MtlsSecretStr = Field(default=None) - - @model_validator(mode="after") - def validate_fields(self): - """Validates that no inconsistent request is being sent.""" - if self.entity_type and self.entity_type not in ["USER", "GROUP"]: - raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") - - if self.entity_type == "USER" and self.extra_group_roles: - raise ValueError("Inconsistent entity information. Use extra_user_roles instead") - - if self.entity_type == "GROUP" and self.extra_user_roles: - raise ValueError("Inconsistent entity information. Use extra_group_roles instead") - - return self - - -class ProviderCommonModel(CommonModel): - """Serialized fields added to the databag. - - endpoints stores the endpoints exposed to that client. - secret_user is a secret URI mapping to the user credentials - secret_tls is a secret URI mapping to the TLS certificate - secret_extra is a secret URI for all additional secrets requested. - """ - - endpoints: str | None = Field(default=None) - read_only_endpoints: str | None = Field(default=None) - secret_user: SecretString | None = Field(default=None) - secret_tls: SecretString | None = Field(default=None) - secret_extra: SecretString | None = Field(default=None) - secret_entity: SecretString | None = Field(default=None) - - -class ResourceProviderModel(ProviderCommonModel): - """Extended model including the deserialized fields.""" - - username: UserSecretStr = Field(default=None) - password: UserSecretStr = Field(default=None) - uris: UserSecretStr = Field(default=None) - read_only_uris: UserSecretStr = Field(default=None) - tls: TlsSecretBool = Field(default=None) - tls_ca: TlsSecretStr = Field(default=None) - entity_name: EntitySecretStr = Field(default=None) - entity_password: EntitySecretStr = Field(default=None) - version: str | None = Field(default=None) - - -class RequirerDataContractV0(RequirerCommonModel): - """Backward compatibility.""" - - version: Literal["v0"] = Field(default="v0") - - original_field: str = Field(exclude=True, default="") - - @model_validator(mode="before") - @classmethod - def ensure_original_field(cls, data: Any): - """Ensures that we keep the original field.""" - if isinstance(data, dict): - for alias in RESOURCE_ALIASES: - if data.get(alias) 
is not None: - data["original_field"] = alias - break - else: - for alias in RESOURCE_ALIASES: - if getattr(data, alias) is not None: - data.original_field = alias - return data - - -TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) -TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) - - -class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): - """The new Data Contract.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TRequirerCommonModel] - - -def discriminate_on_version(payload: Any) -> str: - """Use the version to discriminate.""" - if isinstance(payload, dict): - return payload.get("version", "v0") - return getattr(payload, "version", "v0") - - -RequirerDataContractType = Annotated[ - Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], - Discriminator(discriminate_on_version), -] - - -RequirerDataContract = TypeAdapter(RequirerDataContractType) - - -class DataContractV0(ResourceProviderModel): - """The Data contract of the response, for V0.""" - - -class DataContractV1(BaseModel, Generic[TResourceProviderModel]): - """The Data contract of the response, for V1.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TResourceProviderModel] = Field(default_factory=list) - - -DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) - - -TCommonModel = TypeVar("TCommonModel", bound=CommonModel) - - -def is_topic_value_acceptable(value: str | None) -> str | None: - """Check whether the given Kafka topic value is acceptable.""" - if value and "*" in value[:3]: - raise ValueError(f"Error on topic '{value}',, unacceptable value.") - return value - - -class KafkaRequestModel(RequirerCommonModel): - """Specialised model for Kafka.""" - - consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( - Field(default=None) - ) - - -class KafkaResponseModel(ResourceProviderModel): 
- """Kafka response model.""" - - consumer_group_prefix: ExtraSecretStr = Field(default=None) - zookeeper_uris: ExtraSecretStr = Field(default=None) - - -############################################################################## -# AbstractRepository class -############################################################################## - - -class AbstractRepository(ABC): - """Abstract repository interface.""" - - @abstractmethod - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - ... - - @abstractmethod - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> str | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def get_field(self, field: str) -> str | None: - """Gets the value for one field.""" - ... - - @abstractmethod - def get_fields(self, *fields: str) -> dict[str, str | None]: - """Gets the values for all provided fields.""" - ... - - @abstractmethod - def write_field(self, field: str, value: Any) -> None: - """Writes the value in the field, without any secret support.""" - ... - - @abstractmethod - def write_fields(self, mapping: dict[str, Any]) -> None: - """Writes the values of mapping in the fields without any secret support (keys of mapping).""" - ... - - def write_secret_field( - self, field: str, value: Any, group: SecretGroup - ) -> CachedSecret | None: - """Writes a secret field.""" - ... - - @abstractmethod - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def delete_secret(self, label: str): - """Deletes a secret by its label.""" - ... - - @abstractmethod - def delete_field(self, field: str) -> None: - """Deletes a field.""" - ... 
- - @abstractmethod - def delete_fields(self, *fields: str) -> None: - """Deletes all the provided fields.""" - ... - - @abstractmethod - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - """Delete a field stored in a secret group.""" - ... - - @abstractmethod - def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: - """Registers a secret using the repository.""" - ... - - @abstractmethod - def get_data(self) -> dict[str, Any] | None: - """Gets the whole data.""" - ... - - @abstractmethod - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Builds a secret field.""" - - -class OpsRepository(AbstractRepository): - """Implementation for ops repositories, with some methods left out.""" - - SECRET_FIELD_NAME: str - - IGNORES_GROUPS: list[SecretGroup] = [] - - uri_to_databag: bool = True - - def __init__( - self, - model: Model, - relation: Relation | None, - component: Unit | Application, - ): - self._local_app = model.app - self._local_unit = model.unit - self.relation = relation - self.component = component - self.model = model - self.secrets = SecretCache(model, component) - - @abstractmethod - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None - ) -> str: - """Generate unique group mapping for secrets within a relation context.""" - ... 
- - @override - def get_data(self) -> dict[str, Any] | None: - ret: dict[str, Any] = {} - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - for key, value in self.relation.data[self.component].items(): - try: - ret[key] = json.loads(value) - except json.JSONDecodeError: - ret[key] = value - - return ret - - @override - @ensure_leader_for_app - def get_field( - self, - field: str, - ) -> str | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - return relation_data.get(field) - - @override - @ensure_leader_for_app - def get_fields(self, *fields: str) -> dict[str, str]: - res = {} - for field in fields: - if (value := self.get_field(field)) is not None: - res[field] = value - return res - - @override - @ensure_leader_for_app - def write_field(self, field: str, value: Any) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if not value: - return None - self.relation.data[self.component].update({field: value}) - - @override - @ensure_leader_for_app - def write_fields(self, mapping: dict[str, Any]) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - (self.write_field(field, value) for field, value in mapping.items()) - - @override - @ensure_leader_for_app - def write_secret_field( - self, field: str, value: Any, 
secret_group: SecretGroup - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = self.get_field(self.secret_field(secret_group, field)) - - secret = self.secrets.get(label=label, uri=secret_uri) - if not secret: - return self.add_secret(field, value, secret_group) - else: - content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({field: value}) - secret.set_content(full_content) - return secret - - @override - @ensure_leader_for_app - def delete_field(self, field: str) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - try: - relation_data.pop(field) - except KeyError: - logger.debug( - f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" - ) - - @override - @ensure_leader_for_app - def delete_fields(self, *fields: str) -> None: - (self.delete_field(field) for field in fields) - - @override - @ensure_leader_for_app - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - relation_data = self.relation.data[self.component] - secret_field = self.secret_field(secret_group, field) - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = relation_data.get(secret_field) - - secret = 
self.secrets.get(label=label, uri=secret_uri) - - if not secret: - logging.error(f"Can't delete secret for relation {self.relation.id}") - return None - - content = secret.get_content() - new_content = copy.deepcopy(content) - try: - new_content.pop(field) - except KeyError: - logging.debug( - f"Non-existing secret '{field}' was attempted to be removed" - f"from relation {self.relation.id} and group {secret_group}" - ) - - # Write the new secret content if necessary - if new_content: - secret.set_content(new_content) - return - - # Remove the secret from the relation if it's fully gone. - try: - relation_data.pop(field) - except KeyError: - pass - self.secrets.remove(label) - return - - @ensure_leader_for_app - def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): - """Registers the secret group for this relation. - - [MAGIC HERE] - If we fetch a secret using get_secret(id=, label=), - then will be "stuck" on the Secret object, whenever it may - appear (i.e. as an event attribute, or fetched manually) on future occasions. - - This will allow us to uniquely identify the secret on Provider side (typically on - 'secret-changed' events), and map it to the corresponding relation. 
- """ - if not self.relation: - raise ValueError("Cannot register without relation.") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - CachedSecret(self.model, self.component, label, uri).meta - - @override - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - return self.secrets.get(label=label, uri=secret_uri) - - @override - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - uri: str | None = None, - short_uuid: str | None = None, - ) -> Any | None: - """Gets a value for a field stored in a secret group.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - secret_field = self.secret_field(secret_group, field) - - relation_data = self.relation.data[self.component] - secret_uri = uri or relation_data.get(secret_field) - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - if self.uri_to_databag and not secret_uri: - logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - secret = self.secrets.get(label=label, uri=secret_uri) - - if not secret: - 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - content = secret.get_content().get(field) - - if not content: - return - - try: - return json.loads(content) - except json.JSONDecodeError: - return content - - @override - @ensure_leader_for_app - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid) - - secret = self.secrets.add(label, {field: value}, self.relation) - - if not secret.meta or not secret.meta.id: - logging.error("Secret is missing Secret ID") - raise SecretError("Secret added but is missing Secret ID") - - return secret - - @override - @ensure_leader_for_app - def delete_secret(self, label: str) -> None: - self.secrets.remove(label) - - -@final -class OpsRelationRepository(OpsRepository): - """Implementation of the Abstract Repository for non peer relations.""" - - SECRET_FIELD_NAME: str = "secret" - - @override - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None - ) -> str: - """Generate unique group_mappings for secrets within a relation context.""" - if short_uuid: - return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" - return f"{relation.name}.{relation.id}.{secret_group}.secret" - - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Generates the field name to store in the peer relation.""" - return f"{self.SECRET_FIELD_NAME}-{secret_group}" - - -class 
OpsPeerRepository(OpsRepository): - """Implementation of the Ops Repository for peer relations.""" - - SECRET_FIELD_NAME = "internal_secret" - - IGNORES_GROUPS = [ - SecretGroup("user"), - SecretGroup("entity"), - SecretGroup("mtls"), - SecretGroup("tls"), - ] - - uri_to_databag: bool = False - - @property - def scope(self) -> Scope: - """Returns a scope.""" - if isinstance(self.component, Application): - return Scope.APP - if isinstance(self.component, Unit): - return Scope.UNIT - raise ValueError("Invalid component, neither a Unit nor an Application") - - @override - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None - ) -> str: - """Generate unique group_mappings for secrets within a relation context.""" - members = [relation.name, self._local_app.name, self.scope.value] - - if secret_group != SecretGroup("extra"): - members.append(secret_group) - return f"{'.'.join(members)}" - - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Generates the field name to store in the peer relation.""" - if not field: - raise ValueError("Must have a field.") - return f"{field}@{secret_group}" - - -@final -class OpsPeerUnitRepository(OpsPeerRepository): - """Implementation for a unit.""" - - @override - def __init__(self, model: Model, relation: Relation | None, component: Unit): - super().__init__(model, relation, component) - - -@final -class OpsOtherPeerUnitRepository(OpsPeerRepository): - """Implementation for a remote unit.""" - - @override - def __init__(self, model: Model, relation: Relation | None, component: Unit): - if component == model.unit: - raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.") - super().__init__(model, relation, component) - - @override - def write_field(self, field: str, value: Any) -> None: - raise NotImplementedError("It's not possible to update data of another unit.") - - @override - def write_fields(self, mapping: 
dict[str, Any]) -> None: - raise NotImplementedError("It's not possible to update data of another unit.") - - @override - def add_secret( - self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None - ) -> CachedSecret | None: - raise NotImplementedError("It's not possible to update data of another unit.") - - @override - def delete_field(self, field: str) -> None: - raise NotImplementedError("It's not possible to update data of another unit.") - - @override - def delete_fields(self, *fields: str) -> None: - raise NotImplementedError("It's not possible to update data of another unit.") - - @override - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - raise NotImplementedError("It's not possible to update data of another unit.") - - -TRepository = TypeVar("TRepository", bound=OpsRepository) -TCommon = TypeVar("TCommon", bound=BaseModel) -TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel) -TCommonBis = TypeVar("TCommonBis", bound=BaseModel) - - -class RepositoryInterface(Generic[TRepository, TCommon]): - """Repository builder.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - component: Unit | Application, - repository_type: type[TRepository], - model: type[TCommon] | TypeAdapter | None, - ): - self.charm = charm - self._model = charm.model - self.repository_type = repository_type - self.relation_name = relation_name - self.model = model - self.component = component - - @property - def relations(self) -> list[Relation]: - """The list of Relation instances associated with this relation name.""" - return self._model.relations[self.relation_name] - - def repository( - self, relation_id: int, component: Unit | Application | None = None - ) -> TRepository: - """Returns a repository for the relation.""" - relation = self._model.get_relation(self.relation_name, relation_id) - if not relation: - raise ValueError("Missing relation.") - return self.repository_type(self._model, relation, 
component or self.component) - - @overload - def build_model( - self, - relation_id: int, - model: type[TCommonBis], - component: Unit | Application | None = None, - ) -> TCommonBis: ... - - @overload - def build_model( - self, - relation_id: int, - model: type[TCommon], - component: Unit | Application | None = None, - ) -> TCommon: ... - - @overload - def build_model( - self, - relation_id: int, - model: TypeAdapter[TCommonBis], - component: Unit | Application | None = None, - ) -> TCommonBis: ... - - @overload - def build_model( - self, - relation_id: int, - model: None = None, - component: Unit | Application | None = None, - ) -> TCommon: ... - - def build_model( - self, - relation_id: int, - model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, - component: Unit | Application | None = None, - ) -> TCommon | TCommonBis: - """Builds a model using the repository for that relation.""" - model = model or self.model # First the provided model (allows for specialisation) - component = component or self.component - if not model: - raise ValueError("Missing model to specialise data") - relation = self._model.get_relation(self.relation_name, relation_id) - if not relation: - raise ValueError("Missing relation.") - return build_model(self.repository_type(self._model, relation, component), model) - - def write_model( - self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None - ): - """Writes the model using the repository.""" - relation = self._model.get_relation(self.relation_name, relation_id) - if not relation: - raise ValueError("Missing relation.") - - write_model( - self.repository_type(self._model, relation, self.component), model, context=context - ) - - -class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): - """Specialised Interface to build repositories for app peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - model: type[TCommon] | TypeAdapter | None = 
None, - ): - super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) - - -class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): - """Specialised Interface to build repositories for app peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - model: type[TPeerCommon] | TypeAdapter | None = None, - ): - super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) - - -class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): - """Specialised Interface to build repositories for this unit peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - model: type[TPeerCommon] | TypeAdapter | None = None, - ): - super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) - - -class OpsOtherPeerUnitRepositoryInterface( - RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] -): - """Specialised Interface to build repositories for another unit peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - unit: Unit, - model: type[TPeerCommon] | TypeAdapter | None = None, - ): - super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) - - -############################################################################## -# DDD implementation methods -############################################################################## -############################################################################## - - -def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: - """Builds a common model using the provided repository and provided model structure.""" - data = repository.get_data() or {} - - data.pop("data", None) - - # Beware this means all fields should have a default value here. 
- if isinstance(model, TypeAdapter): - return model.validate_python(data, context={"repository": repository}) - - return model.model_validate(data, context={"repository": repository}) - - -def write_model( - repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None -): - """Writes the data stored in the model using the repository object.""" - context = context or {} - dumped = model.model_dump( - mode="json", context={"repository": repository} | context, exclude_none=False - ) - for field, value in dumped.items(): - if value is None: - repository.delete_field(field) - continue - dumped_value = value if isinstance(value, str) else json.dumps(value) - repository.write_field(field, dumped_value) - - -############################################################################## -# Custom Events -############################################################################## - - -class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): - """Resource requested event. - - Contains the request that should be handled. - - fields to serialize: relation, app, unit, request - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - request: TRequirerCommonModel, - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.request = request - - def snapshot(self) -> dict[str, Any]: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. 
- snapshot["request"] = pickle.dumps(self.request) - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.request = pickle.loads(snapshot["request"]) - - -class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource requested event.""" - - pass - - -class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource Entity requested event.""" - - pass - - -class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - pass - - -class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - request: TRequirerCommonModel, - old_mtls_cert: str | None = None, - ): - super().__init__(handle, relation, app, unit, request) - - self.old_mtls_cert = old_mtls_cert - - def snapshot(self): - """Return a snapshot of the event.""" - return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} - - def restore(self, snapshot): - """Restore the event from a snapshot.""" - super().restore(snapshot) - self.old_mtls_cert = snapshot["old_mtls_cert"] - - -class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): - """Resource requested event. - - Contains the request that should be handled. 
- - fields to serialize: relation, app, unit, request - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - requests: list[TRequirerCommonModel], - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.requests = requests - - def snapshot(self) -> dict[str, Any]: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. - snapshot["requests"] = [pickle.dumps(request) for request in self.requests] - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.requests = [pickle.loads(request) for request in snapshot["requests"]] - - -class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) - resource_requested = EventSource(ResourceRequestedEvent) - resource_entity_requested = EventSource(ResourceEntityRequestedEvent) - resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) - mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) - - -class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): - """Resource created/changed event. - - Contains the request that should be handled. - - fields to serialize: relation, app, unit, response - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - response: TResourceProviderModel, - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.response = response - - def snapshot(self) -> dict: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. 
- snapshot["response"] = pickle.dumps(self.response) - return snapshot - - def restore(self, snapshot: dict): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - - self.response = pickle.loads(snapshot["response"]) - - -class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource has been created.""" - - pass - - -class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource entity has been created.""" - - pass - - -class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read/Write enpoints are changed.""" - - pass - - -class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read-only enpoints are changed.""" - - pass - - -class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Authentication was updated for a user.""" - - pass - - -class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - resource_created = EventSource(ResourceCreatedEvent) - resource_entity_created = EventSource(ResourceEntityCreatedEvent) - endpoints_changed = EventSource(ResourceEndpointsChangedEvent) - read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) - authentication_updated = EventSource(AuthenticationUpdatedEvent) - - -############################################################################## -# Event Handlers -############################################################################## - - -class EventHandlers(Object): - """Requires-side of the relation.""" - - component: Application | Unit - interface: RepositoryInterface - - def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): - """Manager of base client relations.""" - if not unique_key: - unique_key = relation_name - super().__init__(charm, unique_key) - - self.charm = charm - self.relation_name = relation_name - - self.framework.observe( - charm.on[self.relation_name].relation_changed, - self._on_relation_changed_event, - ) - - self.framework.observe( - self.charm.on[self.relation_name].relation_created, - self._on_relation_created_event, - ) - - self.framework.observe( - charm.on.secret_changed, - self._on_secret_changed_event, - ) - - @property - def relations(self) -> list[Relation]: - """Shortcut to get access to the relations.""" - return self.interface.relations - - # Event handlers - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the relation is created.""" - pass - - @abstractmethod - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _handle_event( - self, - ): - """Handles 
the event and reacts accordingly.""" - pass - - def compute_diff( - self, - relation: Relation, - request: RequirerCommonModel | ResourceProviderModel, - repository: AbstractRepository | None = None, - store: bool = True, - ) -> Diff: - """Computes, stores and returns a diff for that request.""" - if not repository: - repository = OpsRelationRepository(self.model, relation, component=relation.app) - - # Gets the data stored in the databag for diff computation - old_data = get_encoded_dict(relation, self.component, "data") - - # In case we're V1, we select specifically this request - if old_data and request.request_id: - old_data: dict | None = old_data.get(request.request_id, None) - - # dump the data of the current request so we can compare - new_data = request.model_dump( - mode="json", - exclude={"data"}, - exclude_none=True, - exclude_defaults=True, - ) - - # Computes the diff - _diff = diff(old_data, new_data) - - if store: - # Update the databag with the new data for later diff computations - store_new_data(relation, self.component, new_data, short_uuid=request.request_id) - - return _diff - - def _relation_from_secret_label(self, secret_label: str) -> Relation | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 3): - return - - try: - relation_id = int(contents[1]) - except ValueError: - return - - relation_name = contents[0] - - try: - return self.model.get_relation(relation_name, relation_id) - except ModelError: - return - - def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 5): - return - - return contents[2] - - -class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): - """Event Handler for resource provider.""" - - on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - request_model: type[TRequirerCommonModel], - unique_key: str = "", - mtls_enabled: bool = False, - bulk_event: bool = False, - ): - """Builds a resource provider event handler. - - Args: - charm: The charm. - relation_name: The relation name this event handler is listening to. - request_model: The request model that is expected to be received. - unique_key: An optional unique key for that object. - mtls_enabled: If True, means the server supports MTLS integration. - bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. - """ - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.app - self.request_model = request_model - self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) - self.mtls_enabled = mtls_enabled - self.bulk_event = bulk_event - - @staticmethod - def _validate_diff(event: RelationEvent, _diff: Diff) -> None: - """Validates that entity information is not changed after relation is established. - - - When entity-type changes, backwards compatibility is broken. - - When extra-user-roles changes, role membership checks become incredibly complex. - - When extra-group-roles changes, role membership checks become incredibly complex. 
- """ - if not isinstance(event, RelationChangedEvent): - return - - for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: - if key in _diff.changed: - raise ValueError(f"Cannot change {key} after relation has already been created") - - def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): - if self.mtls_enabled and "secret-mtls" in _diff.added: - getattr(self.on, "mtls_cert_updated").emit( - event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None - ) - return - # Emit a resource requested event if the setup key (resource name) - # was added to the relation databag, but the entity-type key was not. - if resource_added(_diff) and "entity-type" not in _diff.added: - getattr(self.on, "resource_requested").emit( - event.relation, - app=event.app, - unit=event.unit, - request=request, - ) - # To avoid unnecessary application restarts do not trigger other events. - return - - # Emit an entity requested event if the setup key (resource name) - # was added to the relation databag, in addition to the entity-type key. - if resource_added(_diff) and "entity-type" in _diff.added: - getattr(self.on, "resource_entity_requested").emit( - event.relation, - app=event.app, - unit=event.unit, - request=request, - ) - # To avoid unnecessary application restarts do not trigger other events. - return - - # Emit a permissions changed event if the setup key (resource name) - # was added to the relation databag, and the entity-permissions key changed. - if ( - not resource_added(_diff) - and "entity-type" not in _diff.added - and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) - ): - getattr(self.on, "resource_entity_permissions_changed").emit( - event.relation, app=event.app, unit=event.unit, request=request - ) - # To avoid unnecessary application restarts do not trigger other events. 
- return - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request: RequirerCommonModel, - ): - _diff = self.compute_diff(event.relation, request, repository) - - self._validate_diff(event, _diff) - self._dispatch_events(event, _diff, request) - - def _handle_bulk_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request_model: RequirerDataContractV1[TRequirerCommonModel], - ): - """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. - - This allows for the developer to process the diff and store it themselves - """ - for request in request_model.requests: - # Compute the diff without storing it so we can validate the diffs. - _diff = self.compute_diff(event.relation, request, repository, store=False) - self._validate_diff(event, _diff) - - getattr(self.on, "bulk_resources_requested").emit( - event.relation, app=event.app, unit=event.unit, requests=request_model.requests - ) - - # Store all the diffs if they were not already stored. 
- for request in request_model.requests: - new_data = request.model_dump( - mode="json", - exclude={"data"}, - context={"repository": repository}, - exclude_none=True, - exclude_defaults=True, - ) - store_new_data(event.relation, self.component, new_data, request.request_id) - - @override - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - if not self.mtls_enabled: - logger.info("MTLS is disabled, exiting early.") - return - if not event.secret.label: - return - - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = unit - break - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - old_mtls_cert = event.secret.get_content().get("mtls-cert") - logger.info("mtls-cert-updated") - - # V0, just fire the event. - if version == "v0": - request = build_model(repository, RequirerDataContractV0) - # V1, find the corresponding request. 
- else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if not short_uuid: - return - for _request in request_model.requests: - if _request.request_id == short_uuid: - request = _request - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "mtls_cert_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - request=request, - mtls_cert=old_mtls_cert, - ) - - @override - def _on_relation_changed_event(self, event: RelationChangedEvent): - if not self.charm.unit.is_leader(): - return - - repository = OpsRelationRepository( - self.model, event.relation, component=event.relation.app - ) - - # Don't do anything until we get some data - if not repository.get_data(): - return - - version = repository.get_field("version") or "v0" - if version == "v0": - request_model = build_model(repository, RequirerDataContractV0) - old_name = request_model.original_field - request_model.request_id = None # For safety, let's ensure that we don't have a model. - self._handle_event(event, repository, request_model) - logger.info( - f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" - ) - self.interface.repository( - event.relation.id, - ).write_field(old_name, request_model.resource) - else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if self.bulk_event: - self._handle_bulk_event(event, repository, request_model) - return - for request in request_model.requests: - self._handle_event(event, repository, request) - - def set_response(self, relation_id: int, response: ResourceProviderModel): - r"""Sets a response in the databag. - - This function will react accordingly to the version number. - If the version number is v0, then we write the data directly in the databag. - If the version number is v1, then we write the data in the list of responses. - - /!\ This function updates a response if it was already present in the databag! 
- - Args: - relation_id: The specific relation id for that event. - response: The response to write in the databag. - """ - if not self.charm.unit.is_leader(): - return - - relation = self.charm.model.get_relation(self.relation_name, relation_id) - - if not relation: - raise ValueError("Missing relation.") - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - if version == "v0": - # Ensure the request_id is None - response.request_id = None - self.interface.write_model( - relation_id, response, context={"version": "v0"} - ) # {"database": "database-name", "secret-user": "uri", ...} - return - - model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) - - # for/else syntax allows to execute the else if break was not called. - # This allows us to update or append easily. - for index, _response in enumerate(model.requests): - if _response.request_id == response.request_id: - model.requests[index] = response - break - else: - model.requests.append(response) - - self.interface.write_model(relation_id, model) - return - - -class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): - """Event Handler for resource requirer.""" - - on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - requests: list[RequirerCommonModel], - response_model: type[TResourceProviderModel], - unique_key: str = "", - relation_aliases: list[str] | None = None, - ): - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.unit - self.relation_aliases = relation_aliases - self._requests = requests - self.response_model = DataContractV1[response_model] - self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( - OpsRelationRepositoryInterface(charm, relation_name, self.response_model) - ) - - if 
requests: - self._request_model = requests[0].__class__ - else: - self._request_model = RequirerCommonModel - - # First, check that the number of aliases matches the one defined in charm metadata. - if self.relation_aliases: - relation_connection_limit = self.charm.meta.requires[relation_name].limit - if len(self.relation_aliases) != relation_connection_limit: - raise ValueError( - f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" - ) - - # Created custom event names for each alias. - if self.relation_aliases: - for relation_alias in self.relation_aliases: - self.on.define_event( - f"{relation_alias}_resource_created", - ResourceCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_resource_entity_created", - ResourceEntityCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_endpoints_changed", - ResourceEndpointsChangedEvent, - ) - self.on.define_event( - f"{relation_alias}_read_only_endpoints_changed", - ResourceReadOnlyEndpointsChangedEvent, - ) - - ############################################################################## - # Extra useful functions - ############################################################################## - def is_resource_created( - self, - rel_id: int, - request_id: str, - model: DataContractV1[TResourceProviderModel] | None = None, - ) -> bool: - """Checks if a resource has been created or not. - - Args: - rel_id: The relation id to check. - request_id: The specific request id to check. - model: An optional model to use (for performances). 
- """ - if not model: - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - for request in model.requests: - if request.request_id == request_id: - return request.secret_user is not None or request.secret_entity is not None - return False - - def are_all_resources_created(self, rel_id: int) -> bool: - """Checks that all resources have been created for a relation. - - Args: - rel_id: The relation id to check. - """ - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - return all( - self.is_resource_created(rel_id, request.request_id, model) - for request in model.requests - if request.request_id - ) - - @staticmethod - def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: - # Actual checking method. - # No need to check for psycopg here, it's been checked before. - if not psycopg2: - return False - - try: - with psycopg2.connect(connection_string) as connection: - with connection.cursor() as cursor: - cursor.execute( - "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) - ) - return cursor.fetchone() is not None - except psycopg2.Error as e: - logger.exception( - f"failed to check whether {plugin} plugin is enabled in the database: %s", - str(e), - ) - return False - - def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: - """Returns whether a plugin is enabled in the database. - - Args: - plugin: name of the plugin to check. - relation_index: Optional index to check the database (default: 0 - first relation). - """ - if not psycopg2: - return False - - # Can't check a non existing relation. 
- if len(self.relations) <= relation_index: - return False - - relation = self.relations[relation_index] - model = self.interface.build_model(relation_id=relation.id, component=relation.app) - for request in model.requests: - if request.endpoints and request.username and request.password: - host = request.endpoints.split(":")[0] - username = request.username.get_secret_value() - password = request.password.get_secret_value() - - connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" - return self._is_pg_plugin_enabled(plugin, connection_string) - logger.info("No valid request to use to check for plugin.") - return False - - ############################################################################## - # Helpers for aliases - ############################################################################## - - def _assign_relation_alias(self, relation_id: int) -> None: - """Assigns an alias to a relation. - - This function writes in the unit data bag. - - Args: - relation_id: the identifier for a particular relation. - """ - # If no aliases were provided, return immediately. - if not self.relation_aliases: - return - - # Return if an alias was already assigned to this relation - # (like when there are more than one unit joining the relation). - relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation and relation.data[self.charm.unit].get("alias"): - return - - # Retrieve the available aliases (the ones that weren't assigned to any relation). - available_aliases = self.relation_aliases[:] - for relation in self.charm.model.relations[self.relation_name]: - alias = relation.data[self.charm.unit].get("alias") - if alias: - logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) - available_aliases.remove(alias) - - # Set the alias in the unit relation databag of the specific relation. 
- relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation: - relation.data[self.charm.unit].update({"alias": available_aliases[0]}) - - # We need to set relation alias also on the application level so, - # it will be accessible in show-unit juju command, executed for a consumer application unit - if relation and self.charm.unit.is_leader(): - relation.data[self.charm.app].update({"alias": available_aliases[0]}) - - def _emit_aliased_event( - self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel - ): - """Emit all aliased events.""" - alias = self._get_relation_alias(event.relation.id) - if alias: - getattr(self.on, f"{alias}_{event_name}").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - - def _get_relation_alias(self, relation_id: int) -> str | None: - """Gets the relation alias for a relation id.""" - for relation in self.charm.model.relations[self.relation_name]: - if relation.id == relation_id: - return relation.data[self.charm.unit].get("alias") - return None - - ############################################################################## - # Event Handlers - ############################################################################## - - def _on_secret_changed_event(self, event: SecretChangedEvent): - """Event notifying about a new value of a secret.""" - if not event.secret.label: - return - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = 
unit - break - - response_model = self.interface.build_model(relation.id) - if not short_uuid: - return - for _response in response_model.requests: - if _response.request_id == short_uuid: - response = _response - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "authentication_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - response=response, - ) - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the database relation is created.""" - super()._on_relation_created_event(event) - - repository = OpsRelationRepository(self.model, event.relation, self.charm.app) - - # If relations aliases were provided, assign one to the relation. - self._assign_relation_alias(event.relation.id) - - if not self.charm.unit.is_leader(): - return - - # Generate all requests id so they are saved already. - for request in self._requests: - request.request_id = gen_hash(request.resource, request.salt) - - full_request = RequirerDataContractV1[self._request_model]( - version="v1", requests=self._requests - ) - write_model(repository, full_request) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the database relation has changed.""" - is_subordinate = False - remote_unit_data = None - for key in event.relation.data.keys(): - if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): - remote_unit_data = event.relation.data[key] - elif isinstance(key, Application) and key.name != self.charm.app.name: - is_subordinate = event.relation.data[key].get("subordinated") == "true" - - if is_subordinate: - if not remote_unit_data or remote_unit_data.get("state") != "ready": - return - - repository = self.interface.repository(event.relation.id, event.app) - response_model = self.interface.build_model(event.relation.id, component=event.app) - - if not response_model.requests: - logger.info("Still waiting for data.") - 
return - - data = repository.get_field("data") - if not data: - logger.info("Missing data to compute diffs") - return - - request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) - - for response in response_model.requests: - response_id = response.request_id or gen_hash(response.resource, response.salt) - request = request_map.get(response_id, None) - if not request: - raise ValueError( - f"No request matching the response with response_id {response_id}" - ) - self._handle_event(event, repository, request, response) - - ############################################################################## - # Methods to handle specificities of relation events - ############################################################################## - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: OpsRelationRepository, - request: RequirerCommonModel, - response: ResourceProviderModel, - ): - _diff = self.compute_diff(event.relation, response, repository, store=True) - - for newval in _diff.added: - if secret_group := response._get_secret_field(newval): - uri = getattr(response, newval.replace("-", "_")) - repository.register_secret(uri, secret_group, response.request_id) - - if "secret-user" in _diff.added and not request.entity_type: - logger.info(f"resource {response.resource} created at {datetime.now()}") - getattr(self.on, "resource_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_created", response) - return - - if "secret-entity" in _diff.added and request.entity_type: - logger.info(f"entity {response.entity_name} created at {datetime.now()}") - getattr(self.on, "resource_entity_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_entity_created", response) - return - - if "endpoints" in _diff.added or "endpoints" in _diff.changed: - logger.info(f"endpoints 
changed at {datetime.now()}") - getattr(self.on, "endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "endpoints_changed", response) - return - - if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: - logger.info(f"read-only-endpoints changed at {datetime.now()}") - getattr(self.on, "read_only_endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "read_only_endpoints_changed", response) - return diff --git a/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py deleted file mode 100644 index e22388d1..00000000 --- a/tests/v1/integration/kafka-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +++ /dev/null @@ -1,2753 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -r"""Library to manage the relation for the data-platform products. - -This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. - -This library contains the Requires and Provides classes for handling the relation -between an application and multiple managed application supported by the data-team: -MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. 
- -#### Models - -This library exposes basic default models that can be used in most cases. -If you need more complex models, you can subclass them. - -```python -from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr - -class ExtendedCommonModel(RequirerCommonModel): - operator_password: ExtraSecretStr -``` - -Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` - -```python -MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] -``` - -Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. - - -#### Requirer Charm - -This library is a uniform interface to a selection of common database -metadata, with added custom events that add convenience to database management, -and methods to consume the application related data. - - -```python -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ClientCharm(CharmBase): - # Database charm that accepts connections from application charms. 
- def __init__(self, *args) -> None: - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - RequirerCommonModel( - entity_type="USER", - ) - ] - self.database = ResourceRequirerEventHandler( - self,"database", requests, response_model=ResourceProviderModel - ) - self.framework.observe(self.database.on.resource_created, self._on_resource_created) - self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) - - def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new database is created. - relation_id = event.relation.id - response = event.response # This is the response model - - username = event.response.username - password = event.response.password - ... - - def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new entity is created. - ... - -Compared to V1, this library makes heavy use of pydantic models, and allows for -multiple requests, specified as a list. -On the Requirer side, each response will trigger one custom event for that response. -This way, it allows for more strategic events to be emitted according to the request. - -As show above, the library provides some custom events to handle specific situations, which are listed below: -- resource_created: event emitted when the requested database is created. -- resource_entity_created: event emitted when the requested entity is created. -- endpoints_changed: event emitted when the read/write endpoints of the database have changed. -- read_only_endpoints_changed: event emitted when the read-only endpoints of the database - have changed. Event is not triggered if read/write endpoints changed too. 
- -If it is needed to connect multiple database clusters to the same relation endpoint -the application charm can implement the same code as if it would connect to only -one database cluster (like the above code example). - -To differentiate multiple clusters connected to the same relation endpoint -the application charm can use the name of the remote application: - -```python - -def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Get the remote app name of the cluster that triggered this event - cluster = event.relation.app.name -``` - -It is also possible to provide an alias for each different database cluster/relation. - -So, it is possible to differentiate the clusters in two ways. -The first is to use the remote application name, i.e., `event.relation.app.name`, as above. - -The second way is to use different event handlers to handle each cluster events. -The implementation would be something like the following code: - -```python - -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ApplicationCharm(CharmBase): - # Application charm that connects to database charms. - - def __init__(self, *args): - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - ] - # Define the cluster aliases and one handler for each cluster database created event. 
- self.database = ResourceRequirerEventHandler( - self, - relation_name="database" - relations_aliases = ["cluster1", "cluster2"], - requests= - ) - self.framework.observe( - self.database.on.cluster1_resource_created, self._on_cluster1_resource_created - ) - self.framework.observe( - self.database.on.cluster2_resource_created, self._on_cluster2_resource_created - ) - - def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster1 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... - - def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster2 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... -``` - -### Provider Charm - -Following an example of using the ResourceRequestedEvent, in the context of the -database charm code: - -```python -from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides - -class SampleCharm(CharmBase): - - def __init__(self, *args): - super().__init__(*args) - # Charm events defined in the database provides charm library. - self.provided_database = DatabaseProvides(self, relation_name="database") - self.framework.observe(self.provided_database.on.database_requested, - self._on_database_requested) - # Database generic helper - self.database = DatabaseHelper() - - def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: - # Handle the event triggered by a new database requested in the relation - # Retrieve the database name using the charm library. 
- db_name = event.database - # generate a new user credential - username = self.database.generate_user() - password = self.database.generate_password() - # set the credentials for the relation - self.provided_database.set_credentials(event.relation.id, username, password) - # set other variables for the relation event.set_tls("False") -``` - -As shown above, the library provides a custom event (database_requested) to handle -the situation when an application charm requests a new database to be created. -It's preferred to subscribe to this event instead of relation changed event to avoid -creating a new database when other information other than a database name is -exchanged in the relation databag. - -""" - -import copy -import hashlib -import json -import logging -import pickle -import random -import string -from abc import ABC, abstractmethod -from collections import namedtuple -from datetime import datetime -from enum import Enum -from typing import ( - Annotated, - Any, - ClassVar, - Generic, - Literal, - NewType, - TypeAlias, - TypeVar, - final, - overload, -) - -from ops import ( - CharmBase, - EventBase, - Model, - RelationChangedEvent, - RelationCreatedEvent, - RelationEvent, - Secret, - SecretChangedEvent, - SecretInfo, - SecretNotFoundError, -) -from ops.charm import CharmEvents -from ops.framework import EventSource, Handle, Object -from ops.model import Application, ModelError, Relation, Unit -from pydantic import ( - AfterValidator, - AliasChoices, - BaseModel, - ConfigDict, - Discriminator, - Field, - SecretStr, - SerializationInfo, - SerializerFunctionWrapHandler, - Tag, - TypeAdapter, - ValidationInfo, - model_serializer, - model_validator, -) -from pydantic.types import _SecretBase, _SecretField -from pydantic_core import CoreSchema, core_schema -from typing_extensions import TypeAliasType, override - -try: - import psycopg2 -except ImportError: - psycopg2 = None - -# The unique Charmhub library identifier, never change it -LIBID = 
"6c3e6b6680d64e9c89e611d1a15f65be" - -# Increment this major API version when introducing breaking changes -LIBAPI = 1 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 0 - -PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] - -logger = logging.getLogger(__name__) - -MODEL_ERRORS = { - "not_leader": "this unit is not the leader", - "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", - "owner_no_refresh": "ERROR secret owner cannot use --refresh", -} - -RESOURCE_ALIASES = [ - "database", - "subject", - "topic", - "index", - "plugin-url", -] - -SECRET_PREFIX = "secret-" - - -############################################################################## -# Exceptions -############################################################################## - - -class DataInterfacesError(Exception): - """Common ancestor for DataInterfaces related exceptions.""" - - -class SecretError(DataInterfacesError): - """Common ancestor for Secrets related exceptions.""" - - -class SecretAlreadyExistsError(SecretError): - """A secret that was to be added already exists.""" - - -class SecretsUnavailableError(SecretError): - """Secrets aren't yet available for Juju version used.""" - - -class IllegalOperationError(DataInterfacesError): - """To be used when an operation is not allowed to be performed.""" - - -############################################################################## -# Global helpers / utilities -############################################################################## - - -def gen_salt() -> str: - """Generates a consistent salt.""" - return "".join(random.choices(string.ascii_letters + string.digits, k=16)) - - -def gen_hash(resource_name: str, salt: str) -> str: - """Generates a consistent hash based on the resource name and salt.""" - hasher = hashlib.sha256() - hasher.update(f"{resource_name}:{salt}".encode()) - return 
hasher.hexdigest()[:16] - - -def ensure_leader_for_app(f): - """Decorator to ensure that only leader can perform given operation.""" - - def wrapper(self, *args, **kwargs): - if self.component == self._local_app and not self._local_unit.is_leader(): - logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") - return - return f(self, *args, **kwargs) - - wrapper.leader_only = True - return wrapper - - -def get_encoded_dict( - relation: Relation, member: Unit | Application, field: str -) -> dict[str, Any] | None: - """Retrieve and decode an encoded field from relation data.""" - data = json.loads(relation.data[member].get(field, "{}")) - if isinstance(data, dict): - return data - logger.error("Unexpected datatype for %s instead of dict.", str(data)) - - -Diff = namedtuple("Diff", ["added", "changed", "deleted"]) -Diff.__doc__ = """ -A tuple for storing the diff between two data mappings. - -added - keys that were added -changed - keys that still exist but have new values -deleted - key that were deleted""" - - -def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: - """Retrieves the diff of the data in the relation changed databag for v1. - - Args: - old_data: dictionary of the stored data before the event. - new_data: dictionary of the received data to compute the diff. - - Returns: - a Diff instance containing the added, deleted and changed - keys from the event relation databag. - """ - old_data = old_data or {} - - # These are the keys that were added to the databag and triggered this event. - added = new_data.keys() - old_data.keys() - # These are the keys that were removed from the databag and triggered this event. - deleted = old_data.keys() - new_data.keys() - # These are the keys that already existed in the databag, - # but had their values changed. - changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} - # Return the diff with all possible changes. 
- return Diff(added, changed, deleted) - - -def resource_added(diff: Diff) -> bool: - """Ensures that one of the aliased resources has been added.""" - return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) - - -def store_new_data( - relation: Relation, - component: Unit | Application, - new_data: dict[str, str], - short_uuid: str | None = None, -): - """Stores the new data in the databag for diff computation.""" - # First, the case for V0 - if not short_uuid: - relation.data[component].update({"data": json.dumps(new_data)}) - # Then the case for V1, where we have a ShortUUID - else: - data = json.loads(relation.data[component].get("data", "{}")) - if not isinstance(data, dict): - raise ValueError - newest_data = copy.deepcopy(data) - newest_data[short_uuid] = new_data - relation.data[component].update({"data": json.dumps(newest_data)}) - - -############################################################################## -# Helper classes -############################################################################## - -SecretGroup = NewType("SecretGroup", str) - - -SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) - - -class SecretBool(_SecretField[bool]): - """Class for booleans as secrets.""" - - _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() - _error_kind: ClassVar[str] = "bool_type" - - def _display(self) -> str: - return "****" - - -OptionalSecretStr: TypeAlias = SecretStr | None -OptionalSecretBool: TypeAlias = SecretBool | None - -OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) - -UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] -TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] -TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] -MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] -ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] -EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] - - -class Scope(Enum): - """Peer relations scope.""" - - APP = "app" - UNIT = "unit" - - -class CachedSecret: - """Locally cache a secret. - - The data structure is precisely reusing/simulating as in the actual Secret Storage - """ - - KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] - - def __init__( - self, - model: Model, - component: Application | Unit, - label: str, - secret_uri: str | None = None, - ): - self._secret_meta = None - self._secret_content = {} - self._secret_uri = secret_uri - self.label = label - self._model = model - self.component = component - self.current_label = None - - @property - def meta(self) -> Secret | None: - """Getting cached secret meta-information.""" - if not self._secret_meta: - if not (self._secret_uri or self.label): - return - - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - # Falling back to seeking for potential legacy labels - logger.info(f"Secret with label {self.label} not found") - - # If still not found, to be checked by URI, to be labelled with the proposed label - if not self._secret_meta and self._secret_uri: - self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) - return self._secret_meta - - ########################################################################## - # Public functions - ########################################################################## - - def add_secret( - self, - content: dict[str, str], - relation: Relation | None = None, - label: str | None = None, - ) -> Secret: - """Create a new secret.""" - if self._secret_uri: - raise SecretAlreadyExistsError( - "Secret is already defined with uri %s", self._secret_uri - ) - - label = self.label if not label else label - - secret = self.component.add_secret(content, label=label) - if 
relation and relation.app != self._model.app: - # If it's not a peer relation, grant is to be applied - secret.grant(relation) - self._secret_uri = secret.id - self._secret_meta = secret - return self._secret_meta - - def get_content(self) -> dict[str, str]: - """Getting cached secret content.""" - if not self._secret_content: - if self.meta: - try: - self._secret_content = self.meta.get_content(refresh=True) - except (ValueError, ModelError) as err: - # https://bugs.launchpad.net/juju/+bug/2042596 - # Only triggered when 'refresh' is set - if isinstance(err, ModelError) and not any( - msg in str(err) for msg in self.KNOWN_MODEL_ERRORS - ): - raise - # Due to: ValueError: Secret owner cannot use refresh=True - self._secret_content = self.meta.get_content() - return self._secret_content - - def set_content(self, content: dict[str, str]) -> None: - """Setting cached secret content.""" - if not self.meta: - return - - if content == self.get_content(): - return - - if content: - self.meta.set_content(content) - self._secret_content = content - else: - self.meta.remove_all_revisions() - - def get_info(self) -> SecretInfo | None: - """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" - if self.meta: - return self.meta.get_info() - - def remove(self) -> None: - """Remove secret.""" - if not self.meta: - raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") - try: - self.meta.remove_all_revisions() - except SecretNotFoundError: - pass - self._secret_content = {} - self._secret_meta = None - self._secret_uri = None - - -class SecretCache: - """A data structure storing CachedSecret objects.""" - - def __init__(self, model: Model, component: Application | Unit): - self._model = model - self.component = component - self._secrets: dict[str, CachedSecret] = {} - - def get(self, label: str, uri: str | None = None) -> CachedSecret | None: - """Getting a secret from Juju Secret store or cache.""" - if 
not self._secrets.get(label): - secret = CachedSecret(self._model, self.component, label, uri) - if secret.meta: - self._secrets[label] = secret - return self._secrets.get(label) - - def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: - """Adding a secret to Juju Secret.""" - if self._secrets.get(label): - raise SecretAlreadyExistsError(f"Secret {label} already exists") - - secret = CachedSecret(self._model, self.component, label) - secret.add_secret(content, relation) - self._secrets[label] = secret - return self._secrets[label] - - def remove(self, label: str) -> None: - """Remove a secret from the cache.""" - if secret := self.get(label): - try: - secret.remove() - self._secrets.pop(label) - except (SecretsUnavailableError, KeyError): - pass - else: - return - logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) - - -############################################################################## -# Models classes -############################################################################## - - -class PeerModel(BaseModel): - """Common Model for all peer relations.""" - - model_config = ConfigDict( - validate_by_name=True, - validate_by_alias=True, - populate_by_name=True, - serialize_by_alias=True, - alias_generator=lambda x: x.replace("_", "-"), - extra="allow", - ) - - @model_validator(mode="after") - def extract_secrets(self, info: ValidationInfo): - """Extract all secret_fields into their local field.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing as we're lacking context here.") - return self - repository: AbstractRepository = info.context.get("repository") - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = SecretGroup(field_info.metadata[0]) - if not secret_group: - raise SecretsUnavailableError(field) - - 
aliased_field = field_info.serialization_alias or field - secret = repository.get_secret(secret_group, secret_uri=None) - - if not secret: - logger.info(f"No secret for group {secret_group}") - continue - - value = secret.get_content().get(aliased_field) - - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - value = SecretStr(value) - setattr(self, field, value) - - return self - - @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): - """Serializes the model writing the secrets in their respective secrets.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing serialization as we're lacking context here.") - return handler(self) - repository: AbstractRepository = info.context.get("repository") - - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = SecretGroup(field_info.metadata[0]) - if not secret_group: - raise SecretsUnavailableError(field) - - aliased_field = field_info.serialization_alias or field - secret = repository.get_secret(secret_group, secret_uri=None) - - value = getattr(self, field) - - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - - if secret is None: - if value: - secret = repository.add_secret( - aliased_field, - actual_value, - secret_group, - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - continue - - content = secret.get_content() - full_content = copy.deepcopy(content) - - if value is None: - full_content.pop(aliased_field, None) - else: - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - return handler(self) - - 
-class CommonModel(BaseModel): - """Common Model for both requirer and provider. - - request_id stores the request identifier for easier access. - resource is the requested resource. - """ - - model_config = ConfigDict( - validate_by_name=True, - validate_by_alias=True, - populate_by_name=True, - serialize_by_alias=True, - alias_generator=lambda x: x.replace("_", "-"), - extra="allow", - ) - - resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") - request_id: str | None = Field(default=None) - salt: str = Field( - description="This salt is used to create unique hashes even when other fields map 1-1", - default_factory=gen_salt, - ) - - @model_validator(mode="after") - def extract_secrets(self, info: ValidationInfo): - """Extract all secret_fields into their local field.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing as we're lacking context here.") - return self - repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = field_info.metadata[0] - if not secret_group: - raise SecretsUnavailableError(field) - - aliased_field = field_info.serialization_alias or field - secret_field = repository.secret_field(secret_group, aliased_field).replace( - "-", "_" - ) - secret_uri: str | None = getattr(self, secret_field, None) - - if not secret_uri: - continue - - secret = repository.get_secret( - secret_group, secret_uri=secret_uri, short_uuid=short_uuid - ) - - if not secret: - logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") - continue - - value = secret.get_content().get(aliased_field) - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - 
value = SecretStr(value) - - setattr(self, field, value) - return self - - @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): - """Serializes the model writing the secrets in their respective secrets.""" - _encountered_secrets: set[tuple[CachedSecret, str]] = set() - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing serialization as we're lacking context here.") - return handler(self) - repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) - # Backward compatibility for v0 regarding secrets. - if info.context.get("version") == "v0": - short_uuid = None - - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = field_info.metadata[0] - if not secret_group: - raise SecretsUnavailableError(field) - aliased_field = field_info.serialization_alias or field - secret_field = repository.secret_field(secret_group, aliased_field).replace( - "-", "_" - ) - secret_uri: str | None = getattr(self, secret_field, None) - secret = repository.get_secret( - secret_group, secret_uri=secret_uri, short_uuid=short_uuid - ) - - value = getattr(self, field) - - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - - if secret is None: - if value: - secret = repository.add_secret( - aliased_field, actual_value, secret_group, short_uuid - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - setattr(self, secret_field, secret.meta.id) - continue - - content = secret.get_content() - full_content = copy.deepcopy(content) - - if value is None: - full_content.pop(aliased_field, None) - 
_encountered_secrets.add((secret, secret_field)) - else: - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - - # Delete all empty secrets and clean up their fields. - for secret, secret_field in _encountered_secrets: - if not secret.get_content(): - # Setting a field to '' deletes it - setattr(self, secret_field, "") - repository.delete_secret(secret.label) - - return handler(self) - - @classmethod - def _get_secret_field(cls, field: str) -> SecretGroup | None: - """Checks if the field is a secret uri or not.""" - if not field.startswith(SECRET_PREFIX): - return None - - value = field.split("-")[1] - if info := cls.__pydantic_fields__.get(field.replace("-", "_")): - if info.annotation == SecretString: - return SecretGroup(value) - return None - - -class EntityPermissionModel(BaseModel): - """Entity Permissions Model.""" - - resource_name: str - resource_type: str - privileges: list - - -class RequirerCommonModel(CommonModel): - """Requirer side of the request model. - - extra_user_roles is used to request more roles for that user. - external_node_connectivity is used to indicate that the URI should be made for external clients when True - """ - - extra_user_roles: str | None = Field(default=None) - extra_group_roles: str | None = Field(default=None) - external_node_connectivity: bool = Field(default=False) - entity_type: Literal["USER", "GROUP"] | None = Field(default=None) - entity_permissions: list[EntityPermissionModel] | None = Field(default=None) - secret_mtls: SecretString | None = Field(default=None) - mtls_cert: MtlsSecretStr = Field(default=None) - - @model_validator(mode="after") - def validate_fields(self): - """Validates that no inconsistent request is being sent.""" - if self.entity_type and self.entity_type not in ["USER", "GROUP"]: - raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") - - if self.entity_type == "USER" and self.extra_group_roles: - raise ValueError("Inconsistent entity information. Use extra_user_roles instead") - - if self.entity_type == "GROUP" and self.extra_user_roles: - raise ValueError("Inconsistent entity information. Use extra_group_roles instead") - - return self - - -class ProviderCommonModel(CommonModel): - """Serialized fields added to the databag. - - endpoints stores the endpoints exposed to that client. - secret_user is a secret URI mapping to the user credentials - secret_tls is a secret URI mapping to the TLS certificate - secret_extra is a secret URI for all additional secrets requested. - """ - - endpoints: str | None = Field(default=None) - read_only_endpoints: str | None = Field(default=None) - secret_user: SecretString | None = Field(default=None) - secret_tls: SecretString | None = Field(default=None) - secret_extra: SecretString | None = Field(default=None) - secret_entity: SecretString | None = Field(default=None) - - -class ResourceProviderModel(ProviderCommonModel): - """Extended model including the deserialized fields.""" - - username: UserSecretStr = Field(default=None) - password: UserSecretStr = Field(default=None) - uris: UserSecretStr = Field(default=None) - read_only_uris: UserSecretStr = Field(default=None) - tls: TlsSecretBool = Field(default=None) - tls_ca: TlsSecretStr = Field(default=None) - entity_name: EntitySecretStr = Field(default=None) - entity_password: EntitySecretStr = Field(default=None) - version: str | None = Field(default=None) - - -class RequirerDataContractV0(RequirerCommonModel): - """Backward compatibility.""" - - version: Literal["v0"] = Field(default="v0") - - original_field: str = Field(exclude=True, default="") - - @model_validator(mode="before") - @classmethod - def ensure_original_field(cls, data: Any): - """Ensures that we keep the original field.""" - if isinstance(data, dict): - for alias in RESOURCE_ALIASES: - if data.get(alias) 
is not None: - data["original_field"] = alias - break - else: - for alias in RESOURCE_ALIASES: - if getattr(data, alias) is not None: - data.original_field = alias - return data - - -TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) -TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) - - -class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): - """The new Data Contract.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TRequirerCommonModel] - - -def discriminate_on_version(payload: Any) -> str: - """Use the version to discriminate.""" - if isinstance(payload, dict): - return payload.get("version", "v0") - return getattr(payload, "version", "v0") - - -RequirerDataContractType = Annotated[ - Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], - Discriminator(discriminate_on_version), -] - - -RequirerDataContract = TypeAdapter(RequirerDataContractType) - - -class DataContractV0(ResourceProviderModel): - """The Data contract of the response, for V0.""" - - -class DataContractV1(BaseModel, Generic[TResourceProviderModel]): - """The Data contract of the response, for V1.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TResourceProviderModel] = Field(default_factory=list) - - -DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) - - -TCommonModel = TypeVar("TCommonModel", bound=CommonModel) - - -def is_topic_value_acceptable(value: str | None) -> str | None: - """Check whether the given Kafka topic value is acceptable.""" - if value and "*" in value[:3]: - raise ValueError(f"Error on topic '{value}',, unacceptable value.") - return value - - -class KafkaRequestModel(RequirerCommonModel): - """Specialised model for Kafka.""" - - consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( - Field(default=None) - ) - - -class KafkaResponseModel(ResourceProviderModel): 
- """Kafka response model.""" - - consumer_group_prefix: ExtraSecretStr = Field(default=None) - zookeeper_uris: ExtraSecretStr = Field(default=None) - - -############################################################################## -# AbstractRepository class -############################################################################## - - -class AbstractRepository(ABC): - """Abstract repository interface.""" - - @abstractmethod - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - ... - - @abstractmethod - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> str | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def get_field(self, field: str) -> str | None: - """Gets the value for one field.""" - ... - - @abstractmethod - def get_fields(self, *fields: str) -> dict[str, str | None]: - """Gets the values for all provided fields.""" - ... - - @abstractmethod - def write_field(self, field: str, value: Any) -> None: - """Writes the value in the field, without any secret support.""" - ... - - @abstractmethod - def write_fields(self, mapping: dict[str, Any]) -> None: - """Writes the values of mapping in the fields without any secret support (keys of mapping).""" - ... - - def write_secret_field( - self, field: str, value: Any, group: SecretGroup - ) -> CachedSecret | None: - """Writes a secret field.""" - ... - - @abstractmethod - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def delete_secret(self, label: str): - """Deletes a secret by its label.""" - ... - - @abstractmethod - def delete_field(self, field: str) -> None: - """Deletes a field.""" - ... 
- - @abstractmethod - def delete_fields(self, *fields: str) -> None: - """Deletes all the provided fields.""" - ... - - @abstractmethod - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - """Delete a field stored in a secret group.""" - ... - - @abstractmethod - def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: - """Registers a secret using the repository.""" - ... - - @abstractmethod - def get_data(self) -> dict[str, Any] | None: - """Gets the whole data.""" - ... - - @abstractmethod - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Builds a secret field.""" - - -class OpsRepository(AbstractRepository): - """Implementation for ops repositories, with some methods left out.""" - - SECRET_FIELD_NAME: str - - IGNORES_GROUPS: list[SecretGroup] = [] - - uri_to_databag: bool = True - - def __init__( - self, - model: Model, - relation: Relation | None, - component: Unit | Application, - ): - self._local_app = model.app - self._local_unit = model.unit - self.relation = relation - self.component = component - self.model = model - self.secrets = SecretCache(model, component) - - @abstractmethod - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None - ) -> str: - """Generate unique group mapping for secrets within a relation context.""" - ... 
- - @override - def get_data(self) -> dict[str, Any] | None: - ret: dict[str, Any] = {} - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - for key, value in self.relation.data[self.component].items(): - try: - ret[key] = json.loads(value) - except json.JSONDecodeError: - ret[key] = value - - return ret - - @override - @ensure_leader_for_app - def get_field( - self, - field: str, - ) -> str | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - return relation_data.get(field) - - @override - @ensure_leader_for_app - def get_fields(self, *fields: str) -> dict[str, str]: - res = {} - for field in fields: - if (value := self.get_field(field)) is not None: - res[field] = value - return res - - @override - @ensure_leader_for_app - def write_field(self, field: str, value: Any) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if not value: - return None - self.relation.data[self.component].update({field: value}) - - @override - @ensure_leader_for_app - def write_fields(self, mapping: dict[str, Any]) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - (self.write_field(field, value) for field, value in mapping.items()) - - @override - @ensure_leader_for_app - def write_secret_field( - self, field: str, value: Any, 
secret_group: SecretGroup - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = self.get_field(self.secret_field(secret_group, field)) - - secret = self.secrets.get(label=label, uri=secret_uri) - if not secret: - return self.add_secret(field, value, secret_group) - else: - content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({field: value}) - secret.set_content(full_content) - return secret - - @override - @ensure_leader_for_app - def delete_field(self, field: str) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - try: - relation_data.pop(field) - except KeyError: - logger.debug( - f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" - ) - - @override - @ensure_leader_for_app - def delete_fields(self, *fields: str) -> None: - (self.delete_field(field) for field in fields) - - @override - @ensure_leader_for_app - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - relation_data = self.relation.data[self.component] - secret_field = self.secret_field(secret_group, field) - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = relation_data.get(secret_field) - - secret = 
self.secrets.get(label=label, uri=secret_uri) - - if not secret: - logging.error(f"Can't delete secret for relation {self.relation.id}") - return None - - content = secret.get_content() - new_content = copy.deepcopy(content) - try: - new_content.pop(field) - except KeyError: - logging.debug( - f"Non-existing secret '{field}' was attempted to be removed" - f"from relation {self.relation.id} and group {secret_group}" - ) - - # Write the new secret content if necessary - if new_content: - secret.set_content(new_content) - return - - # Remove the secret from the relation if it's fully gone. - try: - relation_data.pop(field) - except KeyError: - pass - self.secrets.remove(label) - return - - @ensure_leader_for_app - def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): - """Registers the secret group for this relation. - - [MAGIC HERE] - If we fetch a secret using get_secret(id=, label=), - then will be "stuck" on the Secret object, whenever it may - appear (i.e. as an event attribute, or fetched manually) on future occasions. - - This will allow us to uniquely identify the secret on Provider side (typically on - 'secret-changed' events), and map it to the corresponding relation. 
- """ - if not self.relation: - raise ValueError("Cannot register without relation.") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - CachedSecret(self.model, self.component, label, uri).meta - - @override - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - return self.secrets.get(label=label, uri=secret_uri) - - @override - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - uri: str | None = None, - short_uuid: str | None = None, - ) -> Any | None: - """Gets a value for a field stored in a secret group.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - secret_field = self.secret_field(secret_group, field) - - relation_data = self.relation.data[self.component] - secret_uri = uri or relation_data.get(secret_field) - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - if self.uri_to_databag and not secret_uri: - logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - secret = self.secrets.get(label=label, uri=secret_uri) - - if not secret: - 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - content = secret.get_content().get(field) - - if not content: - return - - try: - return json.loads(content) - except json.JSONDecodeError: - return content - - @override - @ensure_leader_for_app - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid) - - secret = self.secrets.add(label, {field: value}, self.relation) - - if not secret.meta or not secret.meta.id: - logging.error("Secret is missing Secret ID") - raise SecretError("Secret added but is missing Secret ID") - - return secret - - @override - @ensure_leader_for_app - def delete_secret(self, label: str) -> None: - self.secrets.remove(label) - - -@final -class OpsRelationRepository(OpsRepository): - """Implementation of the Abstract Repository for non peer relations.""" - - SECRET_FIELD_NAME: str = "secret" - - @override - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None - ) -> str: - """Generate unique group_mappings for secrets within a relation context.""" - if short_uuid: - return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" - return f"{relation.name}.{relation.id}.{secret_group}.secret" - - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Generates the field name to store in the peer relation.""" - return f"{self.SECRET_FIELD_NAME}-{secret_group}" - - -class 
OpsPeerRepository(OpsRepository):
    """Implementation of the Ops Repository for peer relations."""

    SECRET_FIELD_NAME = "internal_secret"

    # Secret groups that never apply to peer data.
    IGNORES_GROUPS = [
        SecretGroup("user"),
        SecretGroup("entity"),
        SecretGroup("mtls"),
        SecretGroup("tls"),
    ]

    # Peer secrets are addressed by label only; URIs are not kept in the databag.
    uri_to_databag: bool = False

    @property
    def scope(self) -> Scope:
        """Returns a scope derived from the component type (app vs unit)."""
        if isinstance(self.component, Application):
            return Scope.APP
        if isinstance(self.component, Unit):
            return Scope.UNIT
        raise ValueError("Invalid component, neither a Unit nor an Application")

    @override
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group_mappings for secrets within a relation context."""
        members = [relation.name, self._local_app.name, self.scope.value]

        # The "extra" group is the default and is left out of the label.
        if secret_group != SecretGroup("extra"):
            members.append(secret_group)
        return f"{'.'.join(members)}"

    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Generates the field name to store in the peer relation."""
        if not field:
            raise ValueError("Must have a field.")
        return f"{field}@{secret_group}"


@final
class OpsPeerUnitRepository(OpsPeerRepository):
    """Implementation for a unit."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        super().__init__(model, relation, component)


@final
class OpsOtherPeerUnitRepository(OpsPeerRepository):
    """Implementation for a remote unit (read-only: all writes raise)."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        # Guard: this class must only wrap *other* units, never the local one.
        if component == model.unit:
            raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.")
        super().__init__(model, relation, component)

    @override
    def write_field(self, field: str, value: Any) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def write_fields(self, mapping: dict[str, Any]) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def add_secret(
        self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> CachedSecret | None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_field(self, field: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_fields(self, *fields: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")


TRepository = TypeVar("TRepository", bound=OpsRepository)
TCommon = TypeVar("TCommon", bound=BaseModel)
TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel)
TCommonBis = TypeVar("TCommonBis", bound=BaseModel)


class RepositoryInterface(Generic[TRepository, TCommon]):
    """Repository builder.

    Binds a charm, a relation name, a component (app/unit) and a repository
    type together, and builds per-relation repositories and models.
    """

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        component: Unit | Application,
        repository_type: type[TRepository],
        model: type[TCommon] | TypeAdapter | None,
    ):
        self.charm = charm
        self._model = charm.model
        self.repository_type = repository_type
        self.relation_name = relation_name
        self.model = model
        self.component = component

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this relation name."""
        return self._model.relations[self.relation_name]

    def repository(
        self, relation_id: int, component: Unit | Application | None = None
    ) -> TRepository:
        """Returns a repository for the relation."""
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return self.repository_type(self._model, relation,
                                    component or self.component)

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommon],
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: TypeAdapter[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: None = None,
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    def build_model(
        self,
        relation_id: int,
        model: type[TCommon] | TypeAdapter[TCommonBis] | None = None,
        component: Unit | Application | None = None,
    ) -> TCommon | TCommonBis:
        """Builds a model using the repository for that relation.

        An explicitly provided `model` takes precedence over the interface's
        default, allowing per-call specialisation.
        """
        model = model or self.model  # First the provided model (allows for specialisation)
        component = component or self.component
        if not model:
            raise ValueError("Missing model to specialise data")
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return build_model(self.repository_type(self._model, relation, component), model)

    def write_model(
        self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None
    ):
        """Writes the model using the repository."""
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")

        write_model(
            self.repository_type(self._model, relation, self.component), model, context=context
        )


class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]):
    """Specialised Interface to build repositories for app relation data."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model)


class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]):
    """Specialised Interface to build repositories for app peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model)


class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]):
    """Specialised Interface to build repositories for this unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model)


class OpsOtherPeerUnitRepositoryInterface(
    RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon]
):
    """Specialised Interface to build repositories for another unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        unit: Unit,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model)


##############################################################################
# DDD implementation methods
##############################################################################
##############################################################################


def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon:
    """Builds a common model using the provided repository and provided model structure.

    The repository is passed through the pydantic validation context so that
    secret-backed fields can be resolved during validation.
    """
    data = repository.get_data() or {}

    # "data" is the internal diff-tracking key, never part of the model.
    data.pop("data", None)

    # Beware this means all fields should have a default value here.
    if isinstance(model, TypeAdapter):
        return model.validate_python(data, context={"repository": repository})

    return model.model_validate(data, context={"repository": repository})


def write_model(
    repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None
):
    """Writes the data stored in the model using the repository object.

    Fields whose value is None are deleted; non-string values are JSON-encoded.
    """
    context = context or {}
    dumped = model.model_dump(
        mode="json", context={"repository": repository} | context, exclude_none=False
    )
    for field, value in dumped.items():
        if value is None:
            repository.delete_field(field)
            continue
        dumped_value = value if isinstance(value, str) else json.dumps(value)
        repository.write_field(field, dumped_value)


##############################################################################
# Custom Events
##############################################################################


class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]):
    """Resource requested event.

    Contains the request that should be handled.

    fields to serialize: relation, app, unit, request
    """

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        request: TRequirerCommonModel,
    ):
        super().__init__(handle)
        self.relation = relation
        self.app = app
        self.unit = unit
        self.request = request

    def snapshot(self) -> dict[str, Any]:
        """Save the event information."""
        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
        if self.app:
            snapshot["app_name"] = self.app.name
        if self.unit:
            snapshot["unit_name"] = self.unit.name
        # The models are too complex and would be blocked by marshal so we pickle dump the model.
        # The full dictionary is pickled afterwards anyway.
- snapshot["request"] = pickle.dumps(self.request) - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.request = pickle.loads(snapshot["request"]) - - -class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource requested event.""" - - pass - - -class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource Entity requested event.""" - - pass - - -class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - pass - - -class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - request: TRequirerCommonModel, - old_mtls_cert: str | None = None, - ): - super().__init__(handle, relation, app, unit, request) - - self.old_mtls_cert = old_mtls_cert - - def snapshot(self): - """Return a snapshot of the event.""" - return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} - - def restore(self, snapshot): - """Restore the event from a snapshot.""" - super().restore(snapshot) - self.old_mtls_cert = snapshot["old_mtls_cert"] - - -class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): - """Resource requested event. - - Contains the request that should be handled. 
- - fields to serialize: relation, app, unit, request - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - requests: list[TRequirerCommonModel], - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.requests = requests - - def snapshot(self) -> dict[str, Any]: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. - snapshot["requests"] = [pickle.dumps(request) for request in self.requests] - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.requests = [pickle.loads(request) for request in snapshot["requests"]] - - -class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) - resource_requested = EventSource(ResourceRequestedEvent) - resource_entity_requested = EventSource(ResourceEntityRequestedEvent) - resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) - mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) - - -class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): - """Resource created/changed event. - - Contains the request that should be handled. - - fields to serialize: relation, app, unit, response - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - response: TResourceProviderModel, - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.response = response - - def snapshot(self) -> dict: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. 
- snapshot["response"] = pickle.dumps(self.response) - return snapshot - - def restore(self, snapshot: dict): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - - self.response = pickle.loads(snapshot["response"]) - - -class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource has been created.""" - - pass - - -class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource entity has been created.""" - - pass - - -class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read/Write enpoints are changed.""" - - pass - - -class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read-only enpoints are changed.""" - - pass - - -class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Authentication was updated for a user.""" - - pass - - -class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): - """Database events. - - This class defines the events that the database can emit. 
    """

    resource_created = EventSource(ResourceCreatedEvent)
    resource_entity_created = EventSource(ResourceEntityCreatedEvent)
    endpoints_changed = EventSource(ResourceEndpointsChangedEvent)
    read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent)
    authentication_updated = EventSource(AuthenticationUpdatedEvent)


##############################################################################
# Event Handlers
##############################################################################


class EventHandlers(Object):
    """Requires-side of the relation."""

    component: Application | Unit
    interface: RepositoryInterface

    def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""):
        """Manager of base client relations."""
        if not unique_key:
            unique_key = relation_name
        super().__init__(charm, unique_key)

        self.charm = charm
        self.relation_name = relation_name

        self.framework.observe(
            charm.on[self.relation_name].relation_changed,
            self._on_relation_changed_event,
        )

        self.framework.observe(
            self.charm.on[self.relation_name].relation_created,
            self._on_relation_created_event,
        )

        self.framework.observe(
            charm.on.secret_changed,
            self._on_secret_changed_event,
        )

    @property
    def relations(self) -> list[Relation]:
        """Shortcut to get access to the relations."""
        return self.interface.relations

    # Event handlers

    def _on_relation_created_event(self, event: RelationCreatedEvent) -> None:
        """Event emitted when the relation is created."""
        pass

    @abstractmethod
    def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
        """Event emitted when the relation data has changed."""
        raise NotImplementedError

    @abstractmethod
    def _on_secret_changed_event(self, event: SecretChangedEvent) -> None:
        """Event emitted when a secret observed by this charm has changed."""
        raise NotImplementedError

    @abstractmethod
    def _handle_event(
        self,
    ):
        """Handles the event and reacts accordingly."""
        pass

    def compute_diff(
        self,
        relation: Relation,
        request: RequirerCommonModel | ResourceProviderModel,
        repository: AbstractRepository | None = None,
        store: bool = True,
    ) -> Diff:
        """Computes, stores and returns a diff for that request.

        When `store` is True the freshly dumped request replaces the stored
        copy so that subsequent diffs are computed against the new state.
        """
        if not repository:
            repository = OpsRelationRepository(self.model, relation, component=relation.app)

        # Gets the data stored in the databag for diff computation
        old_data = get_encoded_dict(relation, self.component, "data")

        # In case we're V1, we select specifically this request
        if old_data and request.request_id:
            old_data: dict | None = old_data.get(request.request_id, None)

        # dump the data of the current request so we can compare
        new_data = request.model_dump(
            mode="json",
            exclude={"data"},
            exclude_none=True,
            exclude_defaults=True,
        )

        # Computes the diff
        _diff = diff(old_data, new_data)

        if store:
            # Update the databag with the new data for later diff computations
            store_new_data(relation, self.component, new_data, short_uuid=request.request_id)

        return _diff

    def _relation_from_secret_label(self, secret_label: str) -> Relation | None:
        """Retrieve the relation that belongs to a secret label."""
        # Labels look like "<relation_name>.<relation_id>[.<short_uuid>].<group>.secret".
        contents = secret_label.split(".")

        if not (contents and len(contents) >= 3):
            return

        try:
            relation_id = int(contents[1])
        except ValueError:
            return

        relation_name = contents[0]

        try:
            return self.model.get_relation(relation_name, relation_id)
        except ModelError:
            return

    def _short_uuid_from_secret_label(self, secret_label: str) -> str | None:
        """Retrieve the request short uuid embedded in a secret label, if any."""
        contents = secret_label.split(".")

        # Only labels with a short_uuid component have >= 5 segments.
        if not (contents and len(contents) >= 5):
            return

        return contents[2]


class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]):
    """Event Handler for resource provider."""

    on = ResourceProvidesEvents[TRequirerCommonModel]()  # type: ignore[reportAssignmentType]

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        request_model: type[TRequirerCommonModel],
        unique_key: str = "",
        mtls_enabled: bool = False,
        bulk_event: bool = False,
    ):
        """Builds a resource provider event handler.

        Args:
            charm: The charm.
            relation_name: The relation name this event handler is listening to.
            request_model: The request model that is expected to be received.
            unique_key: An optional unique key for that object.
            mtls_enabled: If True, means the server supports MTLS integration.
            bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer.
        """
        super().__init__(charm, relation_name, unique_key)
        self.component = self.charm.app
        self.request_model = request_model
        self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model)
        self.mtls_enabled = mtls_enabled
        self.bulk_event = bulk_event

    @staticmethod
    def _validate_diff(event: RelationEvent, _diff: Diff) -> None:
        """Validates that entity information is not changed after relation is established.

        - When entity-type changes, backwards compatibility is broken.
        - When extra-user-roles changes, role membership checks become incredibly complex.
        - When extra-group-roles changes, role membership checks become incredibly complex.
        """
        if not isinstance(event, RelationChangedEvent):
            return

        for key in ["entity-type", "extra-user-roles", "extra-group-roles"]:
            if key in _diff.changed:
                raise ValueError(f"Cannot change {key} after relation has already been created")

    def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel):
        # MTLS takes priority: a newly added client cert only triggers the
        # mtls_cert_updated event.
        if self.mtls_enabled and "secret-mtls" in _diff.added:
            getattr(self.on, "mtls_cert_updated").emit(
                event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None
            )
            return
        # Emit a resource requested event if the setup key (resource name)
        # was added to the relation databag, but the entity-type key was not.
        if resource_added(_diff) and "entity-type" not in _diff.added:
            getattr(self.on, "resource_requested").emit(
                event.relation,
                app=event.app,
                unit=event.unit,
                request=request,
            )
            # To avoid unnecessary application restarts do not trigger other events.
            return

        # Emit an entity requested event if the setup key (resource name)
        # was added to the relation databag, in addition to the entity-type key.
        if resource_added(_diff) and "entity-type" in _diff.added:
            getattr(self.on, "resource_entity_requested").emit(
                event.relation,
                app=event.app,
                unit=event.unit,
                request=request,
            )
            # To avoid unnecessary application restarts do not trigger other events.
            return

        # Emit a permissions changed event if the setup key (resource name)
        # was added to the relation databag, and the entity-permissions key changed.
        if (
            not resource_added(_diff)
            and "entity-type" not in _diff.added
            and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed)
        ):
            getattr(self.on, "resource_entity_permissions_changed").emit(
                event.relation, app=event.app, unit=event.unit, request=request
            )
            # To avoid unnecessary application restarts do not trigger other events.
- return - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request: RequirerCommonModel, - ): - _diff = self.compute_diff(event.relation, request, repository) - - self._validate_diff(event, _diff) - self._dispatch_events(event, _diff, request) - - def _handle_bulk_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request_model: RequirerDataContractV1[TRequirerCommonModel], - ): - """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. - - This allows for the developer to process the diff and store it themselves - """ - for request in request_model.requests: - # Compute the diff without storing it so we can validate the diffs. - _diff = self.compute_diff(event.relation, request, repository, store=False) - self._validate_diff(event, _diff) - - getattr(self.on, "bulk_resources_requested").emit( - event.relation, app=event.app, unit=event.unit, requests=request_model.requests - ) - - # Store all the diffs if they were not already stored. 
- for request in request_model.requests: - new_data = request.model_dump( - mode="json", - exclude={"data"}, - context={"repository": repository}, - exclude_none=True, - exclude_defaults=True, - ) - store_new_data(event.relation, self.component, new_data, request.request_id) - - @override - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - if not self.mtls_enabled: - logger.info("MTLS is disabled, exiting early.") - return - if not event.secret.label: - return - - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = unit - break - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - old_mtls_cert = event.secret.get_content().get("mtls-cert") - logger.info("mtls-cert-updated") - - # V0, just fire the event. - if version == "v0": - request = build_model(repository, RequirerDataContractV0) - # V1, find the corresponding request. 
- else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if not short_uuid: - return - for _request in request_model.requests: - if _request.request_id == short_uuid: - request = _request - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "mtls_cert_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - request=request, - mtls_cert=old_mtls_cert, - ) - - @override - def _on_relation_changed_event(self, event: RelationChangedEvent): - if not self.charm.unit.is_leader(): - return - - repository = OpsRelationRepository( - self.model, event.relation, component=event.relation.app - ) - - # Don't do anything until we get some data - if not repository.get_data(): - return - - version = repository.get_field("version") or "v0" - if version == "v0": - request_model = build_model(repository, RequirerDataContractV0) - old_name = request_model.original_field - request_model.request_id = None # For safety, let's ensure that we don't have a model. - self._handle_event(event, repository, request_model) - logger.info( - f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" - ) - self.interface.repository( - event.relation.id, - ).write_field(old_name, request_model.resource) - else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if self.bulk_event: - self._handle_bulk_event(event, repository, request_model) - return - for request in request_model.requests: - self._handle_event(event, repository, request) - - def set_response(self, relation_id: int, response: ResourceProviderModel): - r"""Sets a response in the databag. - - This function will react accordingly to the version number. - If the version number is v0, then we write the data directly in the databag. - If the version number is v1, then we write the data in the list of responses. - - /!\ This function updates a response if it was already present in the databag! 
- - Args: - relation_id: The specific relation id for that event. - response: The response to write in the databag. - """ - if not self.charm.unit.is_leader(): - return - - relation = self.charm.model.get_relation(self.relation_name, relation_id) - - if not relation: - raise ValueError("Missing relation.") - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - if version == "v0": - # Ensure the request_id is None - response.request_id = None - self.interface.write_model( - relation_id, response, context={"version": "v0"} - ) # {"database": "database-name", "secret-user": "uri", ...} - return - - model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) - - # for/else syntax allows to execute the else if break was not called. - # This allows us to update or append easily. - for index, _response in enumerate(model.requests): - if _response.request_id == response.request_id: - model.requests[index] = response - break - else: - model.requests.append(response) - - self.interface.write_model(relation_id, model) - return - - -class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): - """Event Handler for resource requirer.""" - - on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - requests: list[RequirerCommonModel], - response_model: type[TResourceProviderModel], - unique_key: str = "", - relation_aliases: list[str] | None = None, - ): - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.unit - self.relation_aliases = relation_aliases - self._requests = requests - self.response_model = DataContractV1[response_model] - self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( - OpsRelationRepositoryInterface(charm, relation_name, self.response_model) - ) - - if 
requests: - self._request_model = requests[0].__class__ - else: - self._request_model = RequirerCommonModel - - # First, check that the number of aliases matches the one defined in charm metadata. - if self.relation_aliases: - relation_connection_limit = self.charm.meta.requires[relation_name].limit - if len(self.relation_aliases) != relation_connection_limit: - raise ValueError( - f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" - ) - - # Created custom event names for each alias. - if self.relation_aliases: - for relation_alias in self.relation_aliases: - self.on.define_event( - f"{relation_alias}_resource_created", - ResourceCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_resource_entity_created", - ResourceEntityCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_endpoints_changed", - ResourceEndpointsChangedEvent, - ) - self.on.define_event( - f"{relation_alias}_read_only_endpoints_changed", - ResourceReadOnlyEndpointsChangedEvent, - ) - - ############################################################################## - # Extra useful functions - ############################################################################## - def is_resource_created( - self, - rel_id: int, - request_id: str, - model: DataContractV1[TResourceProviderModel] | None = None, - ) -> bool: - """Checks if a resource has been created or not. - - Args: - rel_id: The relation id to check. - request_id: The specific request id to check. - model: An optional model to use (for performances). 
- """ - if not model: - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - for request in model.requests: - if request.request_id == request_id: - return request.secret_user is not None or request.secret_entity is not None - return False - - def are_all_resources_created(self, rel_id: int) -> bool: - """Checks that all resources have been created for a relation. - - Args: - rel_id: The relation id to check. - """ - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - return all( - self.is_resource_created(rel_id, request.request_id, model) - for request in model.requests - if request.request_id - ) - - @staticmethod - def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: - # Actual checking method. - # No need to check for psycopg here, it's been checked before. - if not psycopg2: - return False - - try: - with psycopg2.connect(connection_string) as connection: - with connection.cursor() as cursor: - cursor.execute( - "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) - ) - return cursor.fetchone() is not None - except psycopg2.Error as e: - logger.exception( - f"failed to check whether {plugin} plugin is enabled in the database: %s", - str(e), - ) - return False - - def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: - """Returns whether a plugin is enabled in the database. - - Args: - plugin: name of the plugin to check. - relation_index: Optional index to check the database (default: 0 - first relation). - """ - if not psycopg2: - return False - - # Can't check a non existing relation. 
- if len(self.relations) <= relation_index: - return False - - relation = self.relations[relation_index] - model = self.interface.build_model(relation_id=relation.id, component=relation.app) - for request in model.requests: - if request.endpoints and request.username and request.password: - host = request.endpoints.split(":")[0] - username = request.username.get_secret_value() - password = request.password.get_secret_value() - - connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" - return self._is_pg_plugin_enabled(plugin, connection_string) - logger.info("No valid request to use to check for plugin.") - return False - - ############################################################################## - # Helpers for aliases - ############################################################################## - - def _assign_relation_alias(self, relation_id: int) -> None: - """Assigns an alias to a relation. - - This function writes in the unit data bag. - - Args: - relation_id: the identifier for a particular relation. - """ - # If no aliases were provided, return immediately. - if not self.relation_aliases: - return - - # Return if an alias was already assigned to this relation - # (like when there are more than one unit joining the relation). - relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation and relation.data[self.charm.unit].get("alias"): - return - - # Retrieve the available aliases (the ones that weren't assigned to any relation). - available_aliases = self.relation_aliases[:] - for relation in self.charm.model.relations[self.relation_name]: - alias = relation.data[self.charm.unit].get("alias") - if alias: - logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) - available_aliases.remove(alias) - - # Set the alias in the unit relation databag of the specific relation. 
- relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation: - relation.data[self.charm.unit].update({"alias": available_aliases[0]}) - - # We need to set relation alias also on the application level so, - # it will be accessible in show-unit juju command, executed for a consumer application unit - if relation and self.charm.unit.is_leader(): - relation.data[self.charm.app].update({"alias": available_aliases[0]}) - - def _emit_aliased_event( - self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel - ): - """Emit all aliased events.""" - alias = self._get_relation_alias(event.relation.id) - if alias: - getattr(self.on, f"{alias}_{event_name}").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - - def _get_relation_alias(self, relation_id: int) -> str | None: - """Gets the relation alias for a relation id.""" - for relation in self.charm.model.relations[self.relation_name]: - if relation.id == relation_id: - return relation.data[self.charm.unit].get("alias") - return None - - ############################################################################## - # Event Handlers - ############################################################################## - - def _on_secret_changed_event(self, event: SecretChangedEvent): - """Event notifying about a new value of a secret.""" - if not event.secret.label: - return - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = 
unit - break - - response_model = self.interface.build_model(relation.id) - if not short_uuid: - return - for _response in response_model.requests: - if _response.request_id == short_uuid: - response = _response - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "authentication_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - response=response, - ) - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the database relation is created.""" - super()._on_relation_created_event(event) - - repository = OpsRelationRepository(self.model, event.relation, self.charm.app) - - # If relations aliases were provided, assign one to the relation. - self._assign_relation_alias(event.relation.id) - - if not self.charm.unit.is_leader(): - return - - # Generate all requests id so they are saved already. - for request in self._requests: - request.request_id = gen_hash(request.resource, request.salt) - - full_request = RequirerDataContractV1[self._request_model]( - version="v1", requests=self._requests - ) - write_model(repository, full_request) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the database relation has changed.""" - is_subordinate = False - remote_unit_data = None - for key in event.relation.data.keys(): - if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): - remote_unit_data = event.relation.data[key] - elif isinstance(key, Application) and key.name != self.charm.app.name: - is_subordinate = event.relation.data[key].get("subordinated") == "true" - - if is_subordinate: - if not remote_unit_data or remote_unit_data.get("state") != "ready": - return - - repository = self.interface.repository(event.relation.id, event.app) - response_model = self.interface.build_model(event.relation.id, component=event.app) - - if not response_model.requests: - logger.info("Still waiting for data.") - 
return - - data = repository.get_field("data") - if not data: - logger.info("Missing data to compute diffs") - return - - request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) - - for response in response_model.requests: - response_id = response.request_id or gen_hash(response.resource, response.salt) - request = request_map.get(response_id, None) - if not request: - raise ValueError( - f"No request matching the response with response_id {response_id}" - ) - self._handle_event(event, repository, request, response) - - ############################################################################## - # Methods to handle specificities of relation events - ############################################################################## - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: OpsRelationRepository, - request: RequirerCommonModel, - response: ResourceProviderModel, - ): - _diff = self.compute_diff(event.relation, response, repository, store=True) - - for newval in _diff.added: - if secret_group := response._get_secret_field(newval): - uri = getattr(response, newval.replace("-", "_")) - repository.register_secret(uri, secret_group, response.request_id) - - if "secret-user" in _diff.added and not request.entity_type: - logger.info(f"resource {response.resource} created at {datetime.now()}") - getattr(self.on, "resource_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_created", response) - return - - if "secret-entity" in _diff.added and request.entity_type: - logger.info(f"entity {response.entity_name} created at {datetime.now()}") - getattr(self.on, "resource_entity_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_entity_created", response) - return - - if "endpoints" in _diff.added or "endpoints" in _diff.changed: - logger.info(f"endpoints 
changed at {datetime.now()}") - getattr(self.on, "endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "endpoints_changed", response) - return - - if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: - logger.info(f"read-only-endpoints changed at {datetime.now()}") - getattr(self.on, "read_only_endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "read_only_endpoints_changed", response) - return diff --git a/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py deleted file mode 100644 index e22388d1..00000000 --- a/tests/v1/integration/kafka-connect-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +++ /dev/null @@ -1,2753 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -r"""Library to manage the relation for the data-platform products. - -This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. 
- -This library contains the Requires and Provides classes for handling the relation -between an application and multiple managed application supported by the data-team: -MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. - -#### Models - -This library exposes basic default models that can be used in most cases. -If you need more complex models, you can subclass them. - -```python -from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr - -class ExtendedCommonModel(RequirerCommonModel): - operator_password: ExtraSecretStr -``` - -Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` - -```python -MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] -``` - -Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. - - -#### Requirer Charm - -This library is a uniform interface to a selection of common database -metadata, with added custom events that add convenience to database management, -and methods to consume the application related data. - - -```python -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ClientCharm(CharmBase): - # Database charm that accepts connections from application charms. 
- def __init__(self, *args) -> None: - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - RequirerCommonModel( - entity_type="USER", - ) - ] - self.database = ResourceRequirerEventHandler( - self,"database", requests, response_model=ResourceProviderModel - ) - self.framework.observe(self.database.on.resource_created, self._on_resource_created) - self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) - - def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new database is created. - relation_id = event.relation.id - response = event.response # This is the response model - - username = event.response.username - password = event.response.password - ... - - def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new entity is created. - ... - -Compared to V1, this library makes heavy use of pydantic models, and allows for -multiple requests, specified as a list. -On the Requirer side, each response will trigger one custom event for that response. -This way, it allows for more strategic events to be emitted according to the request. - -As show above, the library provides some custom events to handle specific situations, which are listed below: -- resource_created: event emitted when the requested database is created. -- resource_entity_created: event emitted when the requested entity is created. -- endpoints_changed: event emitted when the read/write endpoints of the database have changed. -- read_only_endpoints_changed: event emitted when the read-only endpoints of the database - have changed. Event is not triggered if read/write endpoints changed too. 
- -If it is needed to connect multiple database clusters to the same relation endpoint -the application charm can implement the same code as if it would connect to only -one database cluster (like the above code example). - -To differentiate multiple clusters connected to the same relation endpoint -the application charm can use the name of the remote application: - -```python - -def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Get the remote app name of the cluster that triggered this event - cluster = event.relation.app.name -``` - -It is also possible to provide an alias for each different database cluster/relation. - -So, it is possible to differentiate the clusters in two ways. -The first is to use the remote application name, i.e., `event.relation.app.name`, as above. - -The second way is to use different event handlers to handle each cluster events. -The implementation would be something like the following code: - -```python - -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ApplicationCharm(CharmBase): - # Application charm that connects to database charms. - - def __init__(self, *args): - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - ] - # Define the cluster aliases and one handler for each cluster database created event. 
- self.database = ResourceRequirerEventHandler( - self, - relation_name="database" - relations_aliases = ["cluster1", "cluster2"], - requests= - ) - self.framework.observe( - self.database.on.cluster1_resource_created, self._on_cluster1_resource_created - ) - self.framework.observe( - self.database.on.cluster2_resource_created, self._on_cluster2_resource_created - ) - - def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster1 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... - - def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster2 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... -``` - -### Provider Charm - -Following an example of using the ResourceRequestedEvent, in the context of the -database charm code: - -```python -from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides - -class SampleCharm(CharmBase): - - def __init__(self, *args): - super().__init__(*args) - # Charm events defined in the database provides charm library. - self.provided_database = DatabaseProvides(self, relation_name="database") - self.framework.observe(self.provided_database.on.database_requested, - self._on_database_requested) - # Database generic helper - self.database = DatabaseHelper() - - def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: - # Handle the event triggered by a new database requested in the relation - # Retrieve the database name using the charm library. 
- db_name = event.database - # generate a new user credential - username = self.database.generate_user() - password = self.database.generate_password() - # set the credentials for the relation - self.provided_database.set_credentials(event.relation.id, username, password) - # set other variables for the relation event.set_tls("False") -``` - -As shown above, the library provides a custom event (database_requested) to handle -the situation when an application charm requests a new database to be created. -It's preferred to subscribe to this event instead of relation changed event to avoid -creating a new database when other information other than a database name is -exchanged in the relation databag. - -""" - -import copy -import hashlib -import json -import logging -import pickle -import random -import string -from abc import ABC, abstractmethod -from collections import namedtuple -from datetime import datetime -from enum import Enum -from typing import ( - Annotated, - Any, - ClassVar, - Generic, - Literal, - NewType, - TypeAlias, - TypeVar, - final, - overload, -) - -from ops import ( - CharmBase, - EventBase, - Model, - RelationChangedEvent, - RelationCreatedEvent, - RelationEvent, - Secret, - SecretChangedEvent, - SecretInfo, - SecretNotFoundError, -) -from ops.charm import CharmEvents -from ops.framework import EventSource, Handle, Object -from ops.model import Application, ModelError, Relation, Unit -from pydantic import ( - AfterValidator, - AliasChoices, - BaseModel, - ConfigDict, - Discriminator, - Field, - SecretStr, - SerializationInfo, - SerializerFunctionWrapHandler, - Tag, - TypeAdapter, - ValidationInfo, - model_serializer, - model_validator, -) -from pydantic.types import _SecretBase, _SecretField -from pydantic_core import CoreSchema, core_schema -from typing_extensions import TypeAliasType, override - -try: - import psycopg2 -except ImportError: - psycopg2 = None - -# The unique Charmhub library identifier, never change it -LIBID = 
"6c3e6b6680d64e9c89e611d1a15f65be"

# Increment this major API version when introducing breaking changes
LIBAPI = 1

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 0

PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"]

logger = logging.getLogger(__name__)

# Known Juju model error messages that are matched by substring to distinguish
# expected failures (e.g. refresh on an owned secret) from real errors.
MODEL_ERRORS = {
    "not_leader": "this unit is not the leader",
    "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both",
    "owner_no_refresh": "ERROR secret owner cannot use --refresh",
}

# v0 databag field names that all map onto the v1 "resource" field.
RESOURCE_ALIASES = [
    "database",
    "subject",
    "topic",
    "index",
    "plugin-url",
]

SECRET_PREFIX = "secret-"


##############################################################################
# Exceptions
##############################################################################


class DataInterfacesError(Exception):
    """Common ancestor for DataInterfaces related exceptions."""


class SecretError(DataInterfacesError):
    """Common ancestor for Secrets related exceptions."""


class SecretAlreadyExistsError(SecretError):
    """A secret that was to be added already exists."""


class SecretsUnavailableError(SecretError):
    """Secrets aren't yet available for Juju version used."""


class IllegalOperationError(DataInterfacesError):
    """To be used when an operation is not allowed to be performed."""


##############################################################################
# Global helpers / utilities
##############################################################################


def gen_salt() -> str:
    """Generate a random 16-character alphanumeric salt.

    NOTE(review): uses `random`, not `secrets` — acceptable here since the salt
    only de-duplicates hashes (see `gen_hash`), not security material; confirm.
    """
    return "".join(random.choices(string.ascii_letters + string.digits, k=16))


def gen_hash(resource_name: str, salt: str) -> str:
    """Generate a deterministic 16-hex-char hash of ``resource_name`` and ``salt``.

    Used as the "short uuid" identifying one request within a relation.
    """
    hasher = hashlib.sha256()
    hasher.update(f"{resource_name}:{salt}".encode())
    return hasher.hexdigest()[:16]


def ensure_leader_for_app(f):
    """Decorator to ensure that only leader can perform given operation.

    Only guards app-scoped writes: when the repository component is the local
    app and this unit is not leader, the call is skipped and returns None.
    """

    def wrapper(self, *args, **kwargs):
        if self.component == self._local_app and not self._local_unit.is_leader():
            logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit")
            return
        return f(self, *args, **kwargs)

    # Marker attribute so callers/tests can detect leader-only operations.
    wrapper.leader_only = True
    return wrapper


def get_encoded_dict(
    relation: Relation, member: Unit | Application, field: str
) -> dict[str, Any] | None:
    """Retrieve and decode a JSON-encoded dict field from relation data.

    Returns None (implicitly) when the decoded value is not a dict.
    """
    data = json.loads(relation.data[member].get(field, "{}"))
    if isinstance(data, dict):
        return data
    logger.error("Unexpected datatype for %s instead of dict.", str(data))


Diff = namedtuple("Diff", ["added", "changed", "deleted"])
Diff.__doc__ = """
A tuple for storing the diff between two data mappings.

added - keys that were added
changed - keys that still exist but have new values
deleted - key that were deleted"""


def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff:
    """Retrieves the diff of the data in the relation changed databag for v1.

    Args:
        old_data: dictionary of the stored data before the event.
        new_data: dictionary of the received data to compute the diff.

    Returns:
        a Diff instance containing the added, deleted and changed
        keys from the event relation databag.
    """
    old_data = old_data or {}

    # These are the keys that were added to the databag and triggered this event.
    added = new_data.keys() - old_data.keys()
    # These are the keys that were removed from the databag and triggered this event.
    deleted = old_data.keys() - new_data.keys()
    # These are the keys that already existed in the databag,
    # but had their values changed.
    changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]}
    # Return the diff with all possible changes.
    return Diff(added, changed, deleted)


def resource_added(diff: Diff) -> bool:
    """Ensures that one of the aliased resources has been added."""
    return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"])


def store_new_data(
    relation: Relation,
    component: Unit | Application,
    new_data: dict[str, str],
    short_uuid: str | None = None,
):
    """Stores the new data in the databag for diff computation.

    V0 stores the snapshot directly under "data"; V1 nests it per-request
    under its short uuid inside the same "data" field.
    """
    # First, the case for V0
    if not short_uuid:
        relation.data[component].update({"data": json.dumps(new_data)})
    # Then the case for V1, where we have a ShortUUID
    else:
        data = json.loads(relation.data[component].get("data", "{}"))
        if not isinstance(data, dict):
            raise ValueError
        newest_data = copy.deepcopy(data)
        newest_data[short_uuid] = new_data
        relation.data[component].update({"data": json.dumps(newest_data)})


##############################################################################
# Helper classes
##############################################################################

SecretGroup = NewType("SecretGroup", str)


# A string field constrained to the Juju secret URI shape ("secret:...").
SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")])


class SecretBool(_SecretField[bool]):
    """Class for booleans as secrets."""

    _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema()
    _error_kind: ClassVar[str] = "bool_type"

    def _display(self) -> str:
        return "****"


OptionalSecretStr: TypeAlias = SecretStr | None
OptionalSecretBool: TypeAlias = SecretBool | None

# Annotations recognised by the model (de)serializers as secret-backed fields.
OptionalSecrets = (OptionalSecretStr, OptionalSecretBool)

# Annotated secret fields: the string metadata is the secret group name;
# exclude=True keeps the raw value out of the serialized databag.
UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"]
TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"]
TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"]
MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"]
ExtraSecretStr = Annotated[OptionalSecretStr,
Field(exclude=True, default=None), "extra"]
EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"]


class Scope(Enum):
    """Peer relations scope."""

    APP = "app"
    UNIT = "unit"


class CachedSecret:
    """Locally cache a secret.

    The data structure is precisely reusing/simulating as in the actual Secret Storage
    """

    KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]]

    def __init__(
        self,
        model: Model,
        component: Application | Unit,
        label: str,
        secret_uri: str | None = None,
    ):
        # Lazily resolved Secret object and its content; see `meta`/`get_content`.
        self._secret_meta = None
        self._secret_content = {}
        self._secret_uri = secret_uri
        self.label = label
        self._model = model
        self.component = component
        self.current_label = None

    @property
    def meta(self) -> Secret | None:
        """Getting cached secret meta-information.

        Resolution order: by label first, then by URI (which also attaches the
        label to the secret for future lookups).
        """
        if not self._secret_meta:
            if not (self._secret_uri or self.label):
                return

            try:
                self._secret_meta = self._model.get_secret(label=self.label)
            except SecretNotFoundError:
                # Falling back to seeking for potential legacy labels
                logger.info(f"Secret with label {self.label} not found")

            # If still not found, to be checked by URI, to be labelled with the proposed label
            if not self._secret_meta and self._secret_uri:
                self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label)
        return self._secret_meta

    ##########################################################################
    # Public functions
    ##########################################################################

    def add_secret(
        self,
        content: dict[str, str],
        relation: Relation | None = None,
        label: str | None = None,
    ) -> Secret:
        """Create a new secret.

        Raises:
            SecretAlreadyExistsError: if this cache entry already points at a URI.
        """
        if self._secret_uri:
            # NOTE(review): %-style args here render as a tuple in the message;
            # an f-string was likely intended — confirm.
            raise SecretAlreadyExistsError(
                "Secret is already defined with uri %s", self._secret_uri
            )

        label = self.label if not label else label

        secret = self.component.add_secret(content, label=label)
        if relation and relation.app != self._model.app:
            # If it's not a peer relation, grant is to be applied
            secret.grant(relation)
        self._secret_uri = secret.id
        self._secret_meta = secret
        return self._secret_meta

    def get_content(self) -> dict[str, str]:
        """Getting cached secret content."""
        if not self._secret_content:
            if self.meta:
                try:
                    self._secret_content = self.meta.get_content(refresh=True)
                except (ValueError, ModelError) as err:
                    # https://bugs.launchpad.net/juju/+bug/2042596
                    # Only triggered when 'refresh' is set
                    if isinstance(err, ModelError) and not any(
                        msg in str(err) for msg in self.KNOWN_MODEL_ERRORS
                    ):
                        raise
                    # Due to: ValueError: Secret owner cannot use refresh=True
                    self._secret_content = self.meta.get_content()
        return self._secret_content

    def set_content(self, content: dict[str, str]) -> None:
        """Setting cached secret content.

        An empty ``content`` removes every revision of the secret.
        """
        if not self.meta:
            return

        # Avoid creating a new revision when nothing changed.
        if content == self.get_content():
            return

        if content:
            self.meta.set_content(content)
            self._secret_content = content
        else:
            self.meta.remove_all_revisions()

    def get_info(self) -> SecretInfo | None:
        """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any."""
        if self.meta:
            return self.meta.get_info()

    def remove(self) -> None:
        """Remove secret.

        Raises:
            SecretsUnavailableError: if the secret does not exist.
        """
        if not self.meta:
            raise SecretsUnavailableError("Non-existent secret was attempted to be removed.")
        try:
            self.meta.remove_all_revisions()
        except SecretNotFoundError:
            pass
        self._secret_content = {}
        self._secret_meta = None
        self._secret_uri = None


class SecretCache:
    """A data structure storing CachedSecret objects."""

    def __init__(self, model: Model, component: Application | Unit):
        self._model = model
        self.component = component
        # label -> CachedSecret; only secrets that actually resolve are cached.
        self._secrets: dict[str, CachedSecret] = {}

    def get(self, label: str, uri: str | None = None) -> CachedSecret | None:
        """Getting a secret from Juju Secret store or cache."""
        if not self._secrets.get(label):
            secret = CachedSecret(self._model, self.component, label, uri)
            # Only cache when the secret resolves to real metadata.
            if secret.meta:
                self._secrets[label] = secret
        return self._secrets.get(label)

    def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret:
        """Adding a secret to Juju Secret.

        Raises:
            SecretAlreadyExistsError: if a secret with this label is cached.
        """
        if self._secrets.get(label):
            raise SecretAlreadyExistsError(f"Secret {label} already exists")

        secret = CachedSecret(self._model, self.component, label)
        secret.add_secret(content, relation)
        self._secrets[label] = secret
        return self._secrets[label]

    def remove(self, label: str) -> None:
        """Remove a secret from the cache."""
        # NOTE(review): uses root `logging` instead of the module `logger` — confirm intent.
        if secret := self.get(label):
            try:
                secret.remove()
                self._secrets.pop(label)
            except (SecretsUnavailableError, KeyError):
                pass
            else:
                return
        logging.debug("Non-existing Juju Secret was attempted to be removed %s", label)


##############################################################################
# Models classes
##############################################################################


class PeerModel(BaseModel):
    """Common Model for all peer relations.

    Secret-annotated fields are resolved from / written to Juju secrets when a
    repository is supplied via the pydantic validation/serialization context.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        for field, field_info in self.__pydantic_fields__.items():
            # Secret fields are identified by annotation + single group-name metadata.
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                if not secret:
                    logger.info(f"No secret for group {secret_group}")
                    continue

                value = secret.get_content().get(aliased_field)

                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)
                setattr(self, field, value)

        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = SecretGroup(field_info.metadata[0])
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                secret = repository.get_secret(secret_group, secret_uri=None)

                value = getattr(self, field)

                # Unwrap pydantic Secret* wrappers; JSON-encode non-string values.
                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    # No secret yet for this group: create one only if there is a value.
                    if value:
                        secret = repository.add_secret(
                            aliased_field,
                            actual_value,
                            secret_group,
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    # None means "unset": drop the key from the secret content.
                    full_content.pop(aliased_field, None)
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)
        return handler(self)

-class CommonModel(BaseModel):
    """Common Model for both requirer and provider.

    request_id stores the request identifier for easier access.
    resource is the requested resource.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        populate_by_name=True,
        serialize_by_alias=True,
        alias_generator=lambda x: x.replace("_", "-"),
        extra="allow",
    )

    # `resource` also accepts the v0 aliases (database/topic/...) on input.
    resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="")
    request_id: str | None = Field(default=None)
    salt: str = Field(
        description="This salt is used to create unique hashes even when other fields map 1-1",
        default_factory=gen_salt,
    )

    @model_validator(mode="after")
    def extract_secrets(self, info: ValidationInfo):
        """Extract all secret_fields into their local field."""
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing as we're lacking context here.")
            return self
        repository: AbstractRepository = info.context.get("repository")
        # The short uuid scopes secrets per-request within the relation.
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)

                aliased_field = field_info.serialization_alias or field
                # Companion databag field (e.g. "secret-user") holding the secret URI.
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)

                if not secret_uri:
                    continue

                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                if not secret:
                    logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}")
                    continue

                value = secret.get_content().get(aliased_field)
                if value and field_info.annotation == OptionalSecretBool:
                    value = SecretBool(json.loads(value))
                elif value:
                    value = SecretStr(value)

                setattr(self, field, value)
        return self

    @model_serializer(mode="wrap")
    def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo):
        """Serializes the model writing the secrets in their respective secrets."""
        # Secrets whose content shrank to empty; cleaned up after the loop.
        _encountered_secrets: set[tuple[CachedSecret, str]] = set()
        if not info.context or not isinstance(info.context.get("repository"), AbstractRepository):
            logger.debug("No secret parsing serialization as we're lacking context here.")
            return handler(self)
        repository: AbstractRepository = info.context.get("repository")
        short_uuid = self.request_id or gen_hash(self.resource, self.salt)
        # Backward compatibility for v0 regarding secrets.
        if info.context.get("version") == "v0":
            short_uuid = None

        for field, field_info in self.__pydantic_fields__.items():
            if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1:
                secret_group = field_info.metadata[0]
                if not secret_group:
                    raise SecretsUnavailableError(field)
                aliased_field = field_info.serialization_alias or field
                secret_field = repository.secret_field(secret_group, aliased_field).replace(
                    "-", "_"
                )
                secret_uri: str | None = getattr(self, secret_field, None)
                secret = repository.get_secret(
                    secret_group, secret_uri=secret_uri, short_uuid=short_uuid
                )

                value = getattr(self, field)

                actual_value = (
                    value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value
                )
                if not isinstance(actual_value, str):
                    actual_value = json.dumps(actual_value)

                if secret is None:
                    # First value for this group: create the secret and publish its URI.
                    if value:
                        secret = repository.add_secret(
                            aliased_field, actual_value, secret_group, short_uuid
                        )
                        if not secret or not secret.meta:
                            raise SecretError("No secret to send back")
                        setattr(self, secret_field, secret.meta.id)
                    continue

                content = secret.get_content()
                full_content = copy.deepcopy(content)

                if value is None:
                    full_content.pop(aliased_field, None)
                    _encountered_secrets.add((secret, secret_field))
                else:
                    full_content.update({aliased_field: actual_value})
                secret.set_content(full_content)

        # Delete all empty secrets and clean up their fields.
        for secret, secret_field in _encountered_secrets:
            if not secret.get_content():
                # Setting a field to '' deletes it
                setattr(self, secret_field, "")
                repository.delete_secret(secret.label)

        return handler(self)

    @classmethod
    def _get_secret_field(cls, field: str) -> SecretGroup | None:
        """Checks if the field is a secret uri or not.

        Returns the secret group (second dash-separated token) when the field
        is declared on the model with the SecretString annotation.
        """
        if not field.startswith(SECRET_PREFIX):
            return None

        value = field.split("-")[1]
        if info := cls.__pydantic_fields__.get(field.replace("-", "_")):
            if info.annotation == SecretString:
                return SecretGroup(value)
        return None


class EntityPermissionModel(BaseModel):
    """Entity Permissions Model."""

    resource_name: str
    resource_type: str
    privileges: list


class RequirerCommonModel(CommonModel):
    """Requirer side of the request model.

    extra_user_roles is used to request more roles for that user.
    external_node_connectivity is used to indicate that the URI should be made for external clients when True
    """

    extra_user_roles: str | None = Field(default=None)
    extra_group_roles: str | None = Field(default=None)
    external_node_connectivity: bool = Field(default=False)
    entity_type: Literal["USER", "GROUP"] | None = Field(default=None)
    entity_permissions: list[EntityPermissionModel] | None = Field(default=None)
    secret_mtls: SecretString | None = Field(default=None)
    mtls_cert: MtlsSecretStr = Field(default=None)

    @model_validator(mode="after")
    def validate_fields(self):
        """Validates that no inconsistent request is being sent.

        Raises:
            ValueError: on unknown entity type or mismatched roles/entity combination.
        """
        if self.entity_type and self.entity_type not in ["USER", "GROUP"]:
            raise ValueError("Invalid entity-type. Possible values are USER and GROUP")

        if self.entity_type == "USER" and self.extra_group_roles:
            raise ValueError("Inconsistent entity information. Use extra_user_roles instead")

        if self.entity_type == "GROUP" and self.extra_user_roles:
            raise ValueError("Inconsistent entity information. Use extra_group_roles instead")

        return self


class ProviderCommonModel(CommonModel):
    """Serialized fields added to the databag.

    endpoints stores the endpoints exposed to that client.
    secret_user is a secret URI mapping to the user credentials
    secret_tls is a secret URI mapping to the TLS certificate
    secret_extra is a secret URI for all additional secrets requested.
    """

    endpoints: str | None = Field(default=None)
    read_only_endpoints: str | None = Field(default=None)
    secret_user: SecretString | None = Field(default=None)
    secret_tls: SecretString | None = Field(default=None)
    secret_extra: SecretString | None = Field(default=None)
    secret_entity: SecretString | None = Field(default=None)


class ResourceProviderModel(ProviderCommonModel):
    """Extended model including the deserialized fields."""

    username: UserSecretStr = Field(default=None)
    password: UserSecretStr = Field(default=None)
    uris: UserSecretStr = Field(default=None)
    read_only_uris: UserSecretStr = Field(default=None)
    tls: TlsSecretBool = Field(default=None)
    tls_ca: TlsSecretStr = Field(default=None)
    entity_name: EntitySecretStr = Field(default=None)
    entity_password: EntitySecretStr = Field(default=None)
    version: str | None = Field(default=None)


class RequirerDataContractV0(RequirerCommonModel):
    """Backward compatibility."""

    version: Literal["v0"] = Field(default="v0")

    # Remembers which v0 alias (database/topic/...) carried the resource name,
    # so responses can be written back under the same key.
    original_field: str = Field(exclude=True, default="")

    @model_validator(mode="before")
    @classmethod
    def ensure_original_field(cls, data: Any):
        """Ensures that we keep the original field."""
        if isinstance(data, dict):
            for alias in RESOURCE_ALIASES:
                if data.get(alias)
is not None: - data["original_field"] = alias - break - else: - for alias in RESOURCE_ALIASES: - if getattr(data, alias) is not None: - data.original_field = alias - return data - - -TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) -TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) - - -class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): - """The new Data Contract.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TRequirerCommonModel] - - -def discriminate_on_version(payload: Any) -> str: - """Use the version to discriminate.""" - if isinstance(payload, dict): - return payload.get("version", "v0") - return getattr(payload, "version", "v0") - - -RequirerDataContractType = Annotated[ - Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], - Discriminator(discriminate_on_version), -] - - -RequirerDataContract = TypeAdapter(RequirerDataContractType) - - -class DataContractV0(ResourceProviderModel): - """The Data contract of the response, for V0.""" - - -class DataContractV1(BaseModel, Generic[TResourceProviderModel]): - """The Data contract of the response, for V1.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TResourceProviderModel] = Field(default_factory=list) - - -DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) - - -TCommonModel = TypeVar("TCommonModel", bound=CommonModel) - - -def is_topic_value_acceptable(value: str | None) -> str | None: - """Check whether the given Kafka topic value is acceptable.""" - if value and "*" in value[:3]: - raise ValueError(f"Error on topic '{value}',, unacceptable value.") - return value - - -class KafkaRequestModel(RequirerCommonModel): - """Specialised model for Kafka.""" - - consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( - Field(default=None) - ) - - -class KafkaResponseModel(ResourceProviderModel): 
- """Kafka response model.""" - - consumer_group_prefix: ExtraSecretStr = Field(default=None) - zookeeper_uris: ExtraSecretStr = Field(default=None) - - -############################################################################## -# AbstractRepository class -############################################################################## - - -class AbstractRepository(ABC): - """Abstract repository interface.""" - - @abstractmethod - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - ... - - @abstractmethod - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> str | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def get_field(self, field: str) -> str | None: - """Gets the value for one field.""" - ... - - @abstractmethod - def get_fields(self, *fields: str) -> dict[str, str | None]: - """Gets the values for all provided fields.""" - ... - - @abstractmethod - def write_field(self, field: str, value: Any) -> None: - """Writes the value in the field, without any secret support.""" - ... - - @abstractmethod - def write_fields(self, mapping: dict[str, Any]) -> None: - """Writes the values of mapping in the fields without any secret support (keys of mapping).""" - ... - - def write_secret_field( - self, field: str, value: Any, group: SecretGroup - ) -> CachedSecret | None: - """Writes a secret field.""" - ... - - @abstractmethod - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def delete_secret(self, label: str): - """Deletes a secret by its label.""" - ... - - @abstractmethod - def delete_field(self, field: str) -> None: - """Deletes a field.""" - ... 
- - @abstractmethod - def delete_fields(self, *fields: str) -> None: - """Deletes all the provided fields.""" - ... - - @abstractmethod - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - """Delete a field stored in a secret group.""" - ... - - @abstractmethod - def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: - """Registers a secret using the repository.""" - ... - - @abstractmethod - def get_data(self) -> dict[str, Any] | None: - """Gets the whole data.""" - ... - - @abstractmethod - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Builds a secret field.""" - - -class OpsRepository(AbstractRepository): - """Implementation for ops repositories, with some methods left out.""" - - SECRET_FIELD_NAME: str - - IGNORES_GROUPS: list[SecretGroup] = [] - - uri_to_databag: bool = True - - def __init__( - self, - model: Model, - relation: Relation | None, - component: Unit | Application, - ): - self._local_app = model.app - self._local_unit = model.unit - self.relation = relation - self.component = component - self.model = model - self.secrets = SecretCache(model, component) - - @abstractmethod - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None - ) -> str: - """Generate unique group mapping for secrets within a relation context.""" - ... 
- - @override - def get_data(self) -> dict[str, Any] | None: - ret: dict[str, Any] = {} - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - for key, value in self.relation.data[self.component].items(): - try: - ret[key] = json.loads(value) - except json.JSONDecodeError: - ret[key] = value - - return ret - - @override - @ensure_leader_for_app - def get_field( - self, - field: str, - ) -> str | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - return relation_data.get(field) - - @override - @ensure_leader_for_app - def get_fields(self, *fields: str) -> dict[str, str]: - res = {} - for field in fields: - if (value := self.get_field(field)) is not None: - res[field] = value - return res - - @override - @ensure_leader_for_app - def write_field(self, field: str, value: Any) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if not value: - return None - self.relation.data[self.component].update({field: value}) - - @override - @ensure_leader_for_app - def write_fields(self, mapping: dict[str, Any]) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - (self.write_field(field, value) for field, value in mapping.items()) - - @override - @ensure_leader_for_app - def write_secret_field( - self, field: str, value: Any, 
secret_group: SecretGroup - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = self.get_field(self.secret_field(secret_group, field)) - - secret = self.secrets.get(label=label, uri=secret_uri) - if not secret: - return self.add_secret(field, value, secret_group) - else: - content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({field: value}) - secret.set_content(full_content) - return secret - - @override - @ensure_leader_for_app - def delete_field(self, field: str) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - try: - relation_data.pop(field) - except KeyError: - logger.debug( - f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" - ) - - @override - @ensure_leader_for_app - def delete_fields(self, *fields: str) -> None: - (self.delete_field(field) for field in fields) - - @override - @ensure_leader_for_app - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - relation_data = self.relation.data[self.component] - secret_field = self.secret_field(secret_group, field) - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = relation_data.get(secret_field) - - secret = 
self.secrets.get(label=label, uri=secret_uri) - - if not secret: - logging.error(f"Can't delete secret for relation {self.relation.id}") - return None - - content = secret.get_content() - new_content = copy.deepcopy(content) - try: - new_content.pop(field) - except KeyError: - logging.debug( - f"Non-existing secret '{field}' was attempted to be removed" - f"from relation {self.relation.id} and group {secret_group}" - ) - - # Write the new secret content if necessary - if new_content: - secret.set_content(new_content) - return - - # Remove the secret from the relation if it's fully gone. - try: - relation_data.pop(field) - except KeyError: - pass - self.secrets.remove(label) - return - - @ensure_leader_for_app - def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): - """Registers the secret group for this relation. - - [MAGIC HERE] - If we fetch a secret using get_secret(id=, label=), - then will be "stuck" on the Secret object, whenever it may - appear (i.e. as an event attribute, or fetched manually) on future occasions. - - This will allow us to uniquely identify the secret on Provider side (typically on - 'secret-changed' events), and map it to the corresponding relation. 
- """ - if not self.relation: - raise ValueError("Cannot register without relation.") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - CachedSecret(self.model, self.component, label, uri).meta - - @override - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - return self.secrets.get(label=label, uri=secret_uri) - - @override - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - uri: str | None = None, - short_uuid: str | None = None, - ) -> Any | None: - """Gets a value for a field stored in a secret group.""" - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - - secret_field = self.secret_field(secret_group, field) - - relation_data = self.relation.data[self.component] - secret_uri = uri or relation_data.get(secret_field) - label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) - - if self.uri_to_databag and not secret_uri: - logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - secret = self.secrets.get(label=label, uri=secret_uri) - - if not secret: - 
logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - content = secret.get_content().get(field) - - if not content: - return - - try: - return json.loads(content) - except json.JSONDecodeError: - return content - - @override - @ensure_leader_for_app - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid) - - secret = self.secrets.add(label, {field: value}, self.relation) - - if not secret.meta or not secret.meta.id: - logging.error("Secret is missing Secret ID") - raise SecretError("Secret added but is missing Secret ID") - - return secret - - @override - @ensure_leader_for_app - def delete_secret(self, label: str) -> None: - self.secrets.remove(label) - - -@final -class OpsRelationRepository(OpsRepository): - """Implementation of the Abstract Repository for non peer relations.""" - - SECRET_FIELD_NAME: str = "secret" - - @override - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None - ) -> str: - """Generate unique group_mappings for secrets within a relation context.""" - if short_uuid: - return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" - return f"{relation.name}.{relation.id}.{secret_group}.secret" - - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Generates the field name to store in the peer relation.""" - return f"{self.SECRET_FIELD_NAME}-{secret_group}" - - -class 
class OpsPeerRepository(OpsRepository):
    """Implementation of the Ops Repository for peer relations."""

    SECRET_FIELD_NAME = "internal_secret"

    # Secret groups that peer relations never serve; lookups on them are refused.
    IGNORES_GROUPS = [
        SecretGroup("user"),
        SecretGroup("entity"),
        SecretGroup("mtls"),
        SecretGroup("tls"),
    ]

    # Peer secrets are addressed by label only; no URI is stored in the databag.
    uri_to_databag: bool = False

    @property
    def scope(self) -> Scope:
        """Returns a scope derived from the component type (app vs unit)."""
        if isinstance(self.component, Application):
            return Scope.APP
        if isinstance(self.component, Unit):
            return Scope.UNIT
        raise ValueError("Invalid component, neither a Unit nor an Application")

    @override
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group_mappings for secrets within a relation context."""
        members = [relation.name, self._local_app.name, self.scope.value]
        # "extra" is the default group and is omitted from the label.
        if secret_group != SecretGroup("extra"):
            members.append(secret_group)
        return f"{'.'.join(members)}"

    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Generates the field name to store in the peer relation.

        Raises:
            ValueError: if no field name is provided.
        """
        if not field:
            raise ValueError("Must have a field.")
        return f"{field}@{secret_group}"


@final
class OpsPeerUnitRepository(OpsPeerRepository):
    """Implementation for a unit."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        super().__init__(model, relation, component)


@final
class OpsOtherPeerUnitRepository(OpsPeerRepository):
    """Implementation for a remote unit.

    Read-only view: every mutating operation raises NotImplementedError since a
    charm cannot write to another unit's databag.
    """

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        if component == model.unit:
            raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.")
        super().__init__(model, relation, component)

    @override
    def write_field(self, field: str, value: Any) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def write_fields(self, mapping: dict[str, Any]) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def add_secret(
        self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> CachedSecret | None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_field(self, field: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_fields(self, *fields: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")


TRepository = TypeVar("TRepository", bound=OpsRepository)
TCommon = TypeVar("TCommon", bound=BaseModel)
TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel)
TCommonBis = TypeVar("TCommonBis", bound=BaseModel)


class RepositoryInterface(Generic[TRepository, TCommon]):
    """Repository builder.

    Binds a charm, a relation name and a component to a concrete repository
    type so per-relation repositories and models can be built on demand.
    """

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        component: Unit | Application,
        repository_type: type[TRepository],
        model: type[TCommon] | TypeAdapter | None,
    ):
        self.charm = charm
        self._model = charm.model
        self.repository_type = repository_type
        self.relation_name = relation_name
        self.model = model
        self.component = component

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this relation name."""
        return self._model.relations[self.relation_name]

    def repository(
        self, relation_id: int, component: Unit | Application | None = None
    ) -> TRepository:
        """Returns a repository for the relation.

        Raises:
            ValueError: if the relation does not exist.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return self.repository_type(self._model, relation, component or self.component)

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: type[TCommon],
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: TypeAdapter[TCommonBis],
        component: Unit | Application | None = None,
    ) -> TCommonBis: ...

    @overload
    def build_model(
        self,
        relation_id: int,
        model: None = None,
        component: Unit | Application | None = None,
    ) -> TCommon: ...

    def build_model(
        self,
        relation_id: int,
        model: type[TCommon] | TypeAdapter[TCommonBis] | None = None,
        component: Unit | Application | None = None,
    ) -> TCommon | TCommonBis:
        """Builds a model using the repository for that relation.

        Raises:
            ValueError: if no model is available or the relation does not exist.
        """
        model = model or self.model  # First the provided model (allows for specialisation)
        component = component or self.component
        if not model:
            raise ValueError("Missing model to specialise data")
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return build_model(self.repository_type(self._model, relation, component), model)

    def write_model(
        self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None
    ):
        """Writes the model using the repository.

        Raises:
            ValueError: if the relation does not exist.
        """
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")

        write_model(
            self.repository_type(self._model, relation, self.component), model, context=context
        )


class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]):
    """Specialised Interface to build repositories for regular (non-peer) relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model)


class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]):
    """Specialised Interface to build repositories for app peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model)


class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]):
    """Specialised Interface to build repositories for this unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model)


class OpsOtherPeerUnitRepositoryInterface(
    RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon]
):
    """Specialised Interface to build repositories for another unit peer relations."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        unit: Unit,
        model: type[TPeerCommon] | TypeAdapter | None = None,
    ):
        super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model)


##############################################################################
# DDD implementation methods
##############################################################################
##############################################################################


def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon:
    """Builds a common model using the provided repository and provided model structure."""
    data = repository.get_data() or {}

    # "data" is the internal diff-tracking key, never part of the model.
    data.pop("data", None)

    # Beware this means all fields should have a default value here.
    if isinstance(model, TypeAdapter):
        return model.validate_python(data, context={"repository": repository})

    return model.model_validate(data, context={"repository": repository})
def write_model(
    repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None
):
    """Writes the data stored in the model using the repository object.

    None values delete the corresponding field; non-string values are
    JSON-encoded before being written.
    """
    context = context or {}
    dumped = model.model_dump(
        mode="json", context={"repository": repository} | context, exclude_none=False
    )
    for field, value in dumped.items():
        if value is None:
            repository.delete_field(field)
            continue
        dumped_value = value if isinstance(value, str) else json.dumps(value)
        repository.write_field(field, dumped_value)


##############################################################################
# Custom Events
##############################################################################


class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]):
    """Resource requested event.

    Contains the request that should be handled.

    fields to serialize: relation, app, unit, request
    """

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        request: TRequirerCommonModel,
    ):
        super().__init__(handle)
        self.relation = relation
        self.app = app
        self.unit = unit
        self.request = request

    def snapshot(self) -> dict[str, Any]:
        """Save the event information."""
        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
        if self.app:
            snapshot["app_name"] = self.app.name
        if self.unit:
            snapshot["unit_name"] = self.unit.name
        # The models are too complex and would be blocked by marshal so we pickle dump the model.
        # The full dictionary is pickled afterwards anyway.
        snapshot["request"] = pickle.dumps(self.request)
        return snapshot

    def restore(self, snapshot: dict[str, Any]):
        """Restore event information.

        Raises:
            ValueError: if the stored relation no longer exists.
        """
        relation = self.framework.model.get_relation(
            snapshot["relation_name"], snapshot["relation_id"]
        )
        if not relation:
            raise ValueError("Missing relation")
        self.relation = relation
        self.app = None
        app_name = snapshot.get("app_name")
        if app_name:
            self.app = self.framework.model.get_app(app_name)
        self.unit = None
        unit_name = snapshot.get("unit_name")
        if unit_name:
            # Bug fix: restore the unit from "unit_name" — the original
            # overwrote self.app with get_app(unit_name) and left unit None.
            self.unit = self.framework.model.get_unit(unit_name)
        # NOTE: pickle payload is produced by this same charm in snapshot(),
        # so it is trusted input.
        self.request = pickle.loads(snapshot["request"])


class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]):
    """Resource requested event."""

    pass


class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]):
    """Resource Entity requested event."""

    pass


class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]):
    """Resource entity permissions changed event."""

    pass


class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]):
    """MTLS certificate updated event, carrying the previous certificate."""

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        request: TRequirerCommonModel,
        old_mtls_cert: str | None = None,
    ):
        super().__init__(handle, relation, app, unit, request)

        self.old_mtls_cert = old_mtls_cert

    def snapshot(self):
        """Return a snapshot of the event."""
        return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert}

    def restore(self, snapshot):
        """Restore the event from a snapshot."""
        super().restore(snapshot)
        self.old_mtls_cert = snapshot["old_mtls_cert"]
class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]):
    """Bulk resources requested event.

    Contains all the requests that should be handled.

    fields to serialize: relation, app, unit, requests
    """

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        requests: list[TRequirerCommonModel],
    ):
        super().__init__(handle)
        self.relation = relation
        self.app = app
        self.unit = unit
        self.requests = requests

    def snapshot(self) -> dict[str, Any]:
        """Save the event information."""
        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
        if self.app:
            snapshot["app_name"] = self.app.name
        if self.unit:
            snapshot["unit_name"] = self.unit.name
        # The models are too complex and would be blocked by marshal so we pickle dump the model.
        # The full dictionary is pickled afterwards anyway.
        snapshot["requests"] = [pickle.dumps(request) for request in self.requests]
        return snapshot

    def restore(self, snapshot: dict[str, Any]):
        """Restore event information.

        Raises:
            ValueError: if the stored relation no longer exists.
        """
        relation = self.framework.model.get_relation(
            snapshot["relation_name"], snapshot["relation_id"]
        )
        if not relation:
            raise ValueError("Missing relation")
        self.relation = relation
        self.app = None
        app_name = snapshot.get("app_name")
        if app_name:
            self.app = self.framework.model.get_app(app_name)
        self.unit = None
        unit_name = snapshot.get("unit_name")
        if unit_name:
            # Bug fix: restore the unit from "unit_name" — the original
            # overwrote self.app with get_app(unit_name) and left unit None.
            self.unit = self.framework.model.get_unit(unit_name)
        self.requests = [pickle.loads(request) for request in snapshot["requests"]]


class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]):
    """Database events.

    This class defines the events that the database can emit.
    """

    bulk_resources_requested = EventSource(BulkResourcesRequestedEvent)
    resource_requested = EventSource(ResourceRequestedEvent)
    resource_entity_requested = EventSource(ResourceEntityRequestedEvent)
    resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent)
    mtls_cert_updated = EventSource(MtlsCertUpdatedEvent)


class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]):
    """Resource created/changed event.

    Contains the response that should be handled.

    fields to serialize: relation, app, unit, response
    """

    def __init__(
        self,
        handle: Handle,
        relation: Relation,
        app: Application | None,
        unit: Unit | None,
        response: TResourceProviderModel,
    ):
        super().__init__(handle)
        self.relation = relation
        self.app = app
        self.unit = unit
        self.response = response

    def snapshot(self) -> dict:
        """Save the event information."""
        snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id}
        if self.app:
            snapshot["app_name"] = self.app.name
        if self.unit:
            snapshot["unit_name"] = self.unit.name
        # The models are too complex and would be blocked by marshal so we pickle dump the model.
        # The full dictionary is pickled afterwards anyway.
        snapshot["response"] = pickle.dumps(self.response)
        return snapshot

    def restore(self, snapshot: dict):
        """Restore event information.

        Raises:
            ValueError: if the stored relation no longer exists.
        """
        relation = self.framework.model.get_relation(
            snapshot["relation_name"], snapshot["relation_id"]
        )
        if not relation:
            raise ValueError("Missing relation")
        self.relation = relation
        self.app = None
        app_name = snapshot.get("app_name")
        if app_name:
            self.app = self.framework.model.get_app(app_name)
        self.unit = None
        unit_name = snapshot.get("unit_name")
        if unit_name:
            # Bug fix: restore the unit from "unit_name" — the original
            # overwrote self.app with get_app(unit_name) and left unit None.
            self.unit = self.framework.model.get_unit(unit_name)

        self.response = pickle.loads(snapshot["response"])


class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
    """Resource has been created."""

    pass


class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
    """Resource entity has been created."""

    pass


class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]):
    """Read/Write endpoints are changed."""

    pass


class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]):
    """Read-only endpoints are changed."""

    pass


class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]):
    """Authentication was updated for a user."""

    pass
- """ - - resource_created = EventSource(ResourceCreatedEvent) - resource_entity_created = EventSource(ResourceEntityCreatedEvent) - endpoints_changed = EventSource(ResourceEndpointsChangedEvent) - read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) - authentication_updated = EventSource(AuthenticationUpdatedEvent) - - -############################################################################## -# Event Handlers -############################################################################## - - -class EventHandlers(Object): - """Requires-side of the relation.""" - - component: Application | Unit - interface: RepositoryInterface - - def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): - """Manager of base client relations.""" - if not unique_key: - unique_key = relation_name - super().__init__(charm, unique_key) - - self.charm = charm - self.relation_name = relation_name - - self.framework.observe( - charm.on[self.relation_name].relation_changed, - self._on_relation_changed_event, - ) - - self.framework.observe( - self.charm.on[self.relation_name].relation_created, - self._on_relation_created_event, - ) - - self.framework.observe( - charm.on.secret_changed, - self._on_secret_changed_event, - ) - - @property - def relations(self) -> list[Relation]: - """Shortcut to get access to the relations.""" - return self.interface.relations - - # Event handlers - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the relation is created.""" - pass - - @abstractmethod - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _handle_event( - self, - ): - """Handles 
the event and reacts accordingly.""" - pass - - def compute_diff( - self, - relation: Relation, - request: RequirerCommonModel | ResourceProviderModel, - repository: AbstractRepository | None = None, - store: bool = True, - ) -> Diff: - """Computes, stores and returns a diff for that request.""" - if not repository: - repository = OpsRelationRepository(self.model, relation, component=relation.app) - - # Gets the data stored in the databag for diff computation - old_data = get_encoded_dict(relation, self.component, "data") - - # In case we're V1, we select specifically this request - if old_data and request.request_id: - old_data: dict | None = old_data.get(request.request_id, None) - - # dump the data of the current request so we can compare - new_data = request.model_dump( - mode="json", - exclude={"data"}, - exclude_none=True, - exclude_defaults=True, - ) - - # Computes the diff - _diff = diff(old_data, new_data) - - if store: - # Update the databag with the new data for later diff computations - store_new_data(relation, self.component, new_data, short_uuid=request.request_id) - - return _diff - - def _relation_from_secret_label(self, secret_label: str) -> Relation | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 3): - return - - try: - relation_id = int(contents[1]) - except ValueError: - return - - relation_name = contents[0] - - try: - return self.model.get_relation(relation_name, relation_id) - except ModelError: - return - - def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 5): - return - - return contents[2] - - -class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): - """Event Handler for resource provider.""" - - on = ResourceProvidesEvents[TRequirerCommonModel]() # 
class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]):
    """Event Handler for resource provider."""

    on = ResourceProvidesEvents[TRequirerCommonModel]()  # type: ignore[reportAssignmentType]

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        request_model: type[TRequirerCommonModel],
        unique_key: str = "",
        mtls_enabled: bool = False,
        bulk_event: bool = False,
    ):
        """Builds a resource provider event handler.

        Args:
            charm: The charm.
            relation_name: The relation name this event handler is listening to.
            request_model: The request model that is expected to be received.
            unique_key: An optional unique key for that object.
            mtls_enabled: If True, means the server supports MTLS integration.
            bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer.
        """
        super().__init__(charm, relation_name, unique_key)
        self.component = self.charm.app
        self.request_model = request_model
        self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model)
        self.mtls_enabled = mtls_enabled
        self.bulk_event = bulk_event

    @staticmethod
    def _validate_diff(event: RelationEvent, _diff: Diff) -> None:
        """Validates that entity information is not changed after relation is established.

        - When entity-type changes, backwards compatibility is broken.
        - When extra-user-roles changes, role membership checks become incredibly complex.
        - When extra-group-roles changes, role membership checks become incredibly complex.
        """
        if not isinstance(event, RelationChangedEvent):
            return

        for key in ["entity-type", "extra-user-roles", "extra-group-roles"]:
            if key in _diff.changed:
                raise ValueError(f"Cannot change {key} after relation has already been created")

    def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel):
        """Emits at most one provider event matching the computed diff."""
        if self.mtls_enabled and "secret-mtls" in _diff.added:
            getattr(self.on, "mtls_cert_updated").emit(
                event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None
            )
            return
        # Emit a resource requested event if the setup key (resource name)
        # was added to the relation databag, but the entity-type key was not.
        if resource_added(_diff) and "entity-type" not in _diff.added:
            getattr(self.on, "resource_requested").emit(
                event.relation,
                app=event.app,
                unit=event.unit,
                request=request,
            )
            # To avoid unnecessary application restarts do not trigger other events.
            return

        # Emit an entity requested event if the setup key (resource name)
        # was added to the relation databag, in addition to the entity-type key.
        if resource_added(_diff) and "entity-type" in _diff.added:
            getattr(self.on, "resource_entity_requested").emit(
                event.relation,
                app=event.app,
                unit=event.unit,
                request=request,
            )
            # To avoid unnecessary application restarts do not trigger other events.
            return

        # Emit a permissions changed event if the setup key (resource name)
        # was added to the relation databag, and the entity-permissions key changed.
        if (
            not resource_added(_diff)
            and "entity-type" not in _diff.added
            and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed)
        ):
            getattr(self.on, "resource_entity_permissions_changed").emit(
                event.relation, app=event.app, unit=event.unit, request=request
            )
            # To avoid unnecessary application restarts do not trigger other events.
            return

    @override
    def _handle_event(
        self,
        event: RelationChangedEvent,
        repository: AbstractRepository,
        request: RequirerCommonModel,
    ):
        """Computes and validates the diff for one request, then dispatches events."""
        _diff = self.compute_diff(event.relation, request, repository)

        self._validate_diff(event, _diff)
        self._dispatch_events(event, _diff, request)

    def _handle_bulk_event(
        self,
        event: RelationChangedEvent,
        repository: AbstractRepository,
        request_model: RequirerDataContractV1[TRequirerCommonModel],
    ):
        """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff.

        This allows for the developer to process the diff and store it themselves
        """
        for request in request_model.requests:
            # Compute the diff without storing it so we can validate the diffs.
            _diff = self.compute_diff(event.relation, request, repository, store=False)
            self._validate_diff(event, _diff)

        getattr(self.on, "bulk_resources_requested").emit(
            event.relation, app=event.app, unit=event.unit, requests=request_model.requests
        )

        # Store all the diffs if they were not already stored.
        for request in request_model.requests:
            new_data = request.model_dump(
                mode="json",
                exclude={"data"},
                context={"repository": repository},
                exclude_none=True,
                exclude_defaults=True,
            )
            store_new_data(event.relation, self.component, new_data, request.request_id)

    @override
    def _on_secret_changed_event(self, event: SecretChangedEvent) -> None:
        """Maps a changed MTLS secret back to its relation/request and re-emits it."""
        if not self.mtls_enabled:
            logger.info("MTLS is disabled, exiting early.")
            return
        if not event.secret.label:
            return

        relation = self._relation_from_secret_label(event.secret.label)
        short_uuid = self._short_uuid_from_secret_label(event.secret.label)

        if not relation:
            # Consistency fix: use the module logger, not the root logger.
            logger.info(
                f"Received secret {event.secret.label} but couldn't parse, seems irrelevant"
            )
            return

        if relation.app == self.charm.app:
            logger.info("Secret changed event ignored for Secret Owner")
            return

        if relation.name != self.relation_name:
            logger.info("Secret changed on wrong relation.")
            return

        remote_unit = None
        for unit in relation.units:
            if unit.app != self.charm.app:
                remote_unit = unit
                break

        repository = OpsRelationRepository(self.model, relation, component=relation.app)
        version = repository.get_field("version") or "v0"

        old_mtls_cert = event.secret.get_content().get("mtls-cert")
        logger.info("mtls-cert-updated")

        # V0, just fire the event.
        if version == "v0":
            request = build_model(repository, RequirerDataContractV0)
        # V1, find the corresponding request.
        else:
            request_model = build_model(repository, RequirerDataContractV1[self.request_model])
            if not short_uuid:
                return
            for _request in request_model.requests:
                if _request.request_id == short_uuid:
                    request = _request
                    break
            else:
                logger.info(f"Unknown request id {short_uuid}")
                return

        # Bug fix: the event keyword is `old_mtls_cert`; passing `mtls_cert`
        # raised a TypeError when the event was instantiated.
        getattr(self.on, "mtls_cert_updated").emit(
            relation,
            app=relation.app,
            unit=remote_unit,
            request=request,
            old_mtls_cert=old_mtls_cert,
        )

    @override
    def _on_relation_changed_event(self, event: RelationChangedEvent):
        """Leader-only: parses the requirer databag (v0 or v1) and dispatches events."""
        if not self.charm.unit.is_leader():
            return

        repository = OpsRelationRepository(
            self.model, event.relation, component=event.relation.app
        )

        # Don't do anything until we get some data
        if not repository.get_data():
            return

        version = repository.get_field("version") or "v0"
        if version == "v0":
            request_model = build_model(repository, RequirerDataContractV0)
            old_name = request_model.original_field
            request_model.request_id = None  # For safety, let's ensure that we don't have a model.
            self._handle_event(event, repository, request_model)
            logger.info(
                f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'"
            )
            self.interface.repository(
                event.relation.id,
            ).write_field(old_name, request_model.resource)
        else:
            request_model = build_model(repository, RequirerDataContractV1[self.request_model])
            if self.bulk_event:
                self._handle_bulk_event(event, repository, request_model)
                return
            for request in request_model.requests:
                self._handle_event(event, repository, request)

    def set_response(self, relation_id: int, response: ResourceProviderModel):
        r"""Sets a response in the databag.

        This function will react accordingly to the version number.
        If the version number is v0, then we write the data directly in the databag.
        If the version number is v1, then we write the data in the list of responses.

        /!\ This function updates a response if it was already present in the databag!

        Args:
            relation_id: The specific relation id for that event.
            response: The response to write in the databag.
        """
        if not self.charm.unit.is_leader():
            return

        relation = self.charm.model.get_relation(self.relation_name, relation_id)

        if not relation:
            raise ValueError("Missing relation.")

        repository = OpsRelationRepository(self.model, relation, component=relation.app)
        version = repository.get_field("version") or "v0"

        if version == "v0":
            # Ensure the request_id is None
            response.request_id = None
            self.interface.write_model(
                relation_id, response, context={"version": "v0"}
            )  # {"database": "database-name", "secret-user": "uri", ...}
            return

        model = self.interface.build_model(relation_id, DataContractV1[response.__class__])

        # for/else syntax allows to execute the else if break was not called.
        # This allows us to update or append easily.
        for index, _response in enumerate(model.requests):
            if _response.request_id == response.request_id:
                model.requests[index] = response
                break
        else:
            model.requests.append(response)

        self.interface.write_model(relation_id, model)
        return
class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]):
    """Event Handler for resource requirer."""

    on = ResourceRequiresEvents[TResourceProviderModel]()  # type: ignore[reportAssignmentType]

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        requests: list[RequirerCommonModel],
        response_model: type[TResourceProviderModel],
        unique_key: str = "",
        relation_aliases: list[str] | None = None,
    ):
        super().__init__(charm, relation_name, unique_key)
        self.component = self.charm.unit
        self.relation_aliases = relation_aliases
        self._requests = requests
        self.response_model = DataContractV1[response_model]
        self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = (
            OpsRelationRepositoryInterface(charm, relation_name, self.response_model)
        )

        # The concrete request model class is inferred from the first request.
        if requests:
            self._request_model = requests[0].__class__
        else:
            self._request_model = RequirerCommonModel

        # First, check that the number of aliases matches the one defined in charm metadata.
        if self.relation_aliases:
            relation_connection_limit = self.charm.meta.requires[relation_name].limit
            if len(self.relation_aliases) != relation_connection_limit:
                raise ValueError(
                    f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}"
                )

        # Created custom event names for each alias.
        if self.relation_aliases:
            for relation_alias in self.relation_aliases:
                self.on.define_event(
                    f"{relation_alias}_resource_created",
                    ResourceCreatedEvent,
                )
                self.on.define_event(
                    f"{relation_alias}_resource_entity_created",
                    ResourceEntityCreatedEvent,
                )
                self.on.define_event(
                    f"{relation_alias}_endpoints_changed",
                    ResourceEndpointsChangedEvent,
                )
                self.on.define_event(
                    f"{relation_alias}_read_only_endpoints_changed",
                    ResourceReadOnlyEndpointsChangedEvent,
                )

    ##############################################################################
    # Extra useful functions
    ##############################################################################
    def is_resource_created(
        self,
        rel_id: int,
        request_id: str,
        model: DataContractV1[TResourceProviderModel] | None = None,
    ) -> bool:
        """Checks if a resource has been created or not.

        A resource counts as created once the provider published either a user
        secret or an entity secret for the matching request.

        Args:
            rel_id: The relation id to check.
            request_id: The specific request id to check.
            model: An optional model to use (for performances).
        """
        if not model:
            relation = self.model.get_relation(self.relation_name, rel_id)
            if not relation:
                return False
            model = self.interface.build_model(relation_id=rel_id, component=relation.app)
        for request in model.requests:
            if request.request_id == request_id:
                return request.secret_user is not None or request.secret_entity is not None
        return False

    def are_all_resources_created(self, rel_id: int) -> bool:
        """Checks that all resources have been created for a relation.

        Args:
            rel_id: The relation id to check.
        """
        relation = self.model.get_relation(self.relation_name, rel_id)
        if not relation:
            return False
        model = self.interface.build_model(relation_id=rel_id, component=relation.app)
        return all(
            self.is_resource_created(rel_id, request.request_id, model)
            for request in model.requests
            if request.request_id
        )

    @staticmethod
    def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool:
        # Actual checking method.
        # No need to check for psycopg here, it's been checked before.
        if not psycopg2:
            return False

        try:
            with psycopg2.connect(connection_string) as connection:
                with connection.cursor() as cursor:
                    cursor.execute(
                        "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,)
                    )
                    return cursor.fetchone() is not None
        except psycopg2.Error as e:
            logger.exception(
                f"failed to check whether {plugin} plugin is enabled in the database: %s",
                str(e),
            )
            return False

    def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool:
        """Returns whether a plugin is enabled in the database.

        Args:
            plugin: name of the plugin to check.
            relation_index: Optional index to check the database (default: 0 - first relation).
        """
        if not psycopg2:
            return False

        # Can't check a non existing relation.
        if len(self.relations) <= relation_index:
            return False

        relation = self.relations[relation_index]
        model = self.interface.build_model(relation_id=relation.id, component=relation.app)
        # Use the first request that has complete credentials.
        for request in model.requests:
            if request.endpoints and request.username and request.password:
                host = request.endpoints.split(":")[0]
                username = request.username.get_secret_value()
                password = request.password.get_secret_value()

                connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'"
                return self._is_pg_plugin_enabled(plugin, connection_string)
        logger.info("No valid request to use to check for plugin.")
        return False

    ##############################################################################
    # Helpers for aliases
    ##############################################################################

    def _assign_relation_alias(self, relation_id: int) -> None:
        """Assigns an alias to a relation.

        This function writes in the unit data bag.

        Args:
            relation_id: the identifier for a particular relation.
        """
        # If no aliases were provided, return immediately.
        if not self.relation_aliases:
            return

        # Return if an alias was already assigned to this relation
        # (like when there are more than one unit joining the relation).
        relation = self.charm.model.get_relation(self.relation_name, relation_id)
        if relation and relation.data[self.charm.unit].get("alias"):
            return

        # Retrieve the available aliases (the ones that weren't assigned to any relation).
        available_aliases = self.relation_aliases[:]
        for relation in self.charm.model.relations[self.relation_name]:
            alias = relation.data[self.charm.unit].get("alias")
            if alias:
                logger.debug("Alias %s was already assigned to relation %d", alias, relation.id)
                available_aliases.remove(alias)

        # Set the alias in the unit relation databag of the specific relation.
- relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation: - relation.data[self.charm.unit].update({"alias": available_aliases[0]}) - - # We need to set relation alias also on the application level so, - # it will be accessible in show-unit juju command, executed for a consumer application unit - if relation and self.charm.unit.is_leader(): - relation.data[self.charm.app].update({"alias": available_aliases[0]}) - - def _emit_aliased_event( - self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel - ): - """Emit all aliased events.""" - alias = self._get_relation_alias(event.relation.id) - if alias: - getattr(self.on, f"{alias}_{event_name}").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - - def _get_relation_alias(self, relation_id: int) -> str | None: - """Gets the relation alias for a relation id.""" - for relation in self.charm.model.relations[self.relation_name]: - if relation.id == relation_id: - return relation.data[self.charm.unit].get("alias") - return None - - ############################################################################## - # Event Handlers - ############################################################################## - - def _on_secret_changed_event(self, event: SecretChangedEvent): - """Event notifying about a new value of a secret.""" - if not event.secret.label: - return - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = 
unit - break - - response_model = self.interface.build_model(relation.id) - if not short_uuid: - return - for _response in response_model.requests: - if _response.request_id == short_uuid: - response = _response - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "authentication_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - response=response, - ) - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the database relation is created.""" - super()._on_relation_created_event(event) - - repository = OpsRelationRepository(self.model, event.relation, self.charm.app) - - # If relations aliases were provided, assign one to the relation. - self._assign_relation_alias(event.relation.id) - - if not self.charm.unit.is_leader(): - return - - # Generate all requests id so they are saved already. - for request in self._requests: - request.request_id = gen_hash(request.resource, request.salt) - - full_request = RequirerDataContractV1[self._request_model]( - version="v1", requests=self._requests - ) - write_model(repository, full_request) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the database relation has changed.""" - is_subordinate = False - remote_unit_data = None - for key in event.relation.data.keys(): - if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): - remote_unit_data = event.relation.data[key] - elif isinstance(key, Application) and key.name != self.charm.app.name: - is_subordinate = event.relation.data[key].get("subordinated") == "true" - - if is_subordinate: - if not remote_unit_data or remote_unit_data.get("state") != "ready": - return - - repository = self.interface.repository(event.relation.id, event.app) - response_model = self.interface.build_model(event.relation.id, component=event.app) - - if not response_model.requests: - logger.info("Still waiting for data.") - 
return - - data = repository.get_field("data") - if not data: - logger.info("Missing data to compute diffs") - return - - request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) - - for response in response_model.requests: - response_id = response.request_id or gen_hash(response.resource, response.salt) - request = request_map.get(response_id, None) - if not request: - raise ValueError( - f"No request matching the response with response_id {response_id}" - ) - self._handle_event(event, repository, request, response) - - ############################################################################## - # Methods to handle specificities of relation events - ############################################################################## - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: OpsRelationRepository, - request: RequirerCommonModel, - response: ResourceProviderModel, - ): - _diff = self.compute_diff(event.relation, response, repository, store=True) - - for newval in _diff.added: - if secret_group := response._get_secret_field(newval): - uri = getattr(response, newval.replace("-", "_")) - repository.register_secret(uri, secret_group, response.request_id) - - if "secret-user" in _diff.added and not request.entity_type: - logger.info(f"resource {response.resource} created at {datetime.now()}") - getattr(self.on, "resource_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_created", response) - return - - if "secret-entity" in _diff.added and request.entity_type: - logger.info(f"entity {response.entity_name} created at {datetime.now()}") - getattr(self.on, "resource_entity_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_entity_created", response) - return - - if "endpoints" in _diff.added or "endpoints" in _diff.changed: - logger.info(f"endpoints 
changed at {datetime.now()}") - getattr(self.on, "endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "endpoints_changed", response) - return - - if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: - logger.info(f"read-only-endpoints changed at {datetime.now()}") - getattr(self.on, "read_only_endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "read_only_endpoints_changed", response) - return diff --git a/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py b/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py deleted file mode 100644 index e22388d1..00000000 --- a/tests/v1/integration/opensearch-charm/lib/charms/data_platform_libs/v1/data_interfaces.py +++ /dev/null @@ -1,2753 +0,0 @@ -# Copyright 2025 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -r"""Library to manage the relation for the data-platform products. - -This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. 
- -This library contains the Requires and Provides classes for handling the relation -between an application and multiple managed application supported by the data-team: -MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. - -#### Models - -This library exposes basic default models that can be used in most cases. -If you need more complex models, you can subclass them. - -```python -from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr - -class ExtendedCommonModel(RequirerCommonModel): - operator_password: ExtraSecretStr -``` - -Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` - -```python -MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] -``` - -Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. - - -#### Requirer Charm - -This library is a uniform interface to a selection of common database -metadata, with added custom events that add convenience to database management, -and methods to consume the application related data. - - -```python -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ClientCharm(CharmBase): - # Database charm that accepts connections from application charms. 
- def __init__(self, *args) -> None: - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - RequirerCommonModel( - entity_type="USER", - ) - ] - self.database = ResourceRequirerEventHandler( - self,"database", requests, response_model=ResourceProviderModel - ) - self.framework.observe(self.database.on.resource_created, self._on_resource_created) - self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) - - def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new database is created. - relation_id = event.relation.id - response = event.response # This is the response model - - username = event.response.username - password = event.response.password - ... - - def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: - # Event triggered when a new entity is created. - ... - -Compared to V1, this library makes heavy use of pydantic models, and allows for -multiple requests, specified as a list. -On the Requirer side, each response will trigger one custom event for that response. -This way, it allows for more strategic events to be emitted according to the request. - -As show above, the library provides some custom events to handle specific situations, which are listed below: -- resource_created: event emitted when the requested database is created. -- resource_entity_created: event emitted when the requested entity is created. -- endpoints_changed: event emitted when the read/write endpoints of the database have changed. -- read_only_endpoints_changed: event emitted when the read-only endpoints of the database - have changed. Event is not triggered if read/write endpoints changed too. 
- -If it is needed to connect multiple database clusters to the same relation endpoint -the application charm can implement the same code as if it would connect to only -one database cluster (like the above code example). - -To differentiate multiple clusters connected to the same relation endpoint -the application charm can use the name of the remote application: - -```python - -def _on_resource_created(self, event: ResourceCreatedEvent) -> None: - # Get the remote app name of the cluster that triggered this event - cluster = event.relation.app.name -``` - -It is also possible to provide an alias for each different database cluster/relation. - -So, it is possible to differentiate the clusters in two ways. -The first is to use the remote application name, i.e., `event.relation.app.name`, as above. - -The second way is to use different event handlers to handle each cluster events. -The implementation would be something like the following code: - -```python - -from charms.data_platform_libs.v1.data_interfaces import ( - RequirerCommonModel, - RequirerDataContractV1, - ResourceCreatedEvent, - ResourceEntityCreatedEvent, - ResourceProviderModel, - ResourceRequirerEventHandler, -) - -class ApplicationCharm(CharmBase): - # Application charm that connects to database charms. - - def __init__(self, *args): - super().__init__(*args) - - requests = [ - RequirerCommonModel( - resource="clientdb", - ), - RequirerCommonModel( - resource="clientbis", - ), - ] - # Define the cluster aliases and one handler for each cluster database created event. 
- self.database = ResourceRequirerEventHandler( - self, - relation_name="database" - relations_aliases = ["cluster1", "cluster2"], - requests= - ) - self.framework.observe( - self.database.on.cluster1_resource_created, self._on_cluster1_resource_created - ) - self.framework.observe( - self.database.on.cluster2_resource_created, self._on_cluster2_resource_created - ) - - def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster1 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... - - def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: - # Handle the created database on the cluster named cluster2 - - # Create configuration file for app - config_file = self._render_app_config_file( - event.response.username, - event.response.password, - event.response.endpoints, - ) - ... -``` - -### Provider Charm - -Following an example of using the ResourceRequestedEvent, in the context of the -database charm code: - -```python -from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides - -class SampleCharm(CharmBase): - - def __init__(self, *args): - super().__init__(*args) - # Charm events defined in the database provides charm library. - self.provided_database = DatabaseProvides(self, relation_name="database") - self.framework.observe(self.provided_database.on.database_requested, - self._on_database_requested) - # Database generic helper - self.database = DatabaseHelper() - - def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: - # Handle the event triggered by a new database requested in the relation - # Retrieve the database name using the charm library. 
- db_name = event.database - # generate a new user credential - username = self.database.generate_user() - password = self.database.generate_password() - # set the credentials for the relation - self.provided_database.set_credentials(event.relation.id, username, password) - # set other variables for the relation event.set_tls("False") -``` - -As shown above, the library provides a custom event (database_requested) to handle -the situation when an application charm requests a new database to be created. -It's preferred to subscribe to this event instead of relation changed event to avoid -creating a new database when other information other than a database name is -exchanged in the relation databag. - -""" - -import copy -import hashlib -import json -import logging -import pickle -import random -import string -from abc import ABC, abstractmethod -from collections import namedtuple -from datetime import datetime -from enum import Enum -from typing import ( - Annotated, - Any, - ClassVar, - Generic, - Literal, - NewType, - TypeAlias, - TypeVar, - final, - overload, -) - -from ops import ( - CharmBase, - EventBase, - Model, - RelationChangedEvent, - RelationCreatedEvent, - RelationEvent, - Secret, - SecretChangedEvent, - SecretInfo, - SecretNotFoundError, -) -from ops.charm import CharmEvents -from ops.framework import EventSource, Handle, Object -from ops.model import Application, ModelError, Relation, Unit -from pydantic import ( - AfterValidator, - AliasChoices, - BaseModel, - ConfigDict, - Discriminator, - Field, - SecretStr, - SerializationInfo, - SerializerFunctionWrapHandler, - Tag, - TypeAdapter, - ValidationInfo, - model_serializer, - model_validator, -) -from pydantic.types import _SecretBase, _SecretField -from pydantic_core import CoreSchema, core_schema -from typing_extensions import TypeAliasType, override - -try: - import psycopg2 -except ImportError: - psycopg2 = None - -# The unique Charmhub library identifier, never change it -LIBID = 
"6c3e6b6680d64e9c89e611d1a15f65be" - -# Increment this major API version when introducing breaking changes -LIBAPI = 1 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 0 - -PYDEPS = ["ops>=2.0.0", "pydantic>=2.11"] - -logger = logging.getLogger(__name__) - -MODEL_ERRORS = { - "not_leader": "this unit is not the leader", - "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", - "owner_no_refresh": "ERROR secret owner cannot use --refresh", -} - -RESOURCE_ALIASES = [ - "database", - "subject", - "topic", - "index", - "plugin-url", -] - -SECRET_PREFIX = "secret-" - - -############################################################################## -# Exceptions -############################################################################## - - -class DataInterfacesError(Exception): - """Common ancestor for DataInterfaces related exceptions.""" - - -class SecretError(DataInterfacesError): - """Common ancestor for Secrets related exceptions.""" - - -class SecretAlreadyExistsError(SecretError): - """A secret that was to be added already exists.""" - - -class SecretsUnavailableError(SecretError): - """Secrets aren't yet available for Juju version used.""" - - -class IllegalOperationError(DataInterfacesError): - """To be used when an operation is not allowed to be performed.""" - - -############################################################################## -# Global helpers / utilities -############################################################################## - - -def gen_salt() -> str: - """Generates a consistent salt.""" - return "".join(random.choices(string.ascii_letters + string.digits, k=16)) - - -def gen_hash(resource_name: str, salt: str) -> str: - """Generates a consistent hash based on the resource name and salt.""" - hasher = hashlib.sha256() - hasher.update(f"{resource_name}:{salt}".encode()) - return 
hasher.hexdigest()[:16] - - -def ensure_leader_for_app(f): - """Decorator to ensure that only leader can perform given operation.""" - - def wrapper(self, *args, **kwargs): - if self.component == self._local_app and not self._local_unit.is_leader(): - logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") - return - return f(self, *args, **kwargs) - - wrapper.leader_only = True - return wrapper - - -def get_encoded_dict( - relation: Relation, member: Unit | Application, field: str -) -> dict[str, Any] | None: - """Retrieve and decode an encoded field from relation data.""" - data = json.loads(relation.data[member].get(field, "{}")) - if isinstance(data, dict): - return data - logger.error("Unexpected datatype for %s instead of dict.", str(data)) - - -Diff = namedtuple("Diff", ["added", "changed", "deleted"]) -Diff.__doc__ = """ -A tuple for storing the diff between two data mappings. - -added - keys that were added -changed - keys that still exist but have new values -deleted - key that were deleted""" - - -def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: - """Retrieves the diff of the data in the relation changed databag for v1. - - Args: - old_data: dictionary of the stored data before the event. - new_data: dictionary of the received data to compute the diff. - - Returns: - a Diff instance containing the added, deleted and changed - keys from the event relation databag. - """ - old_data = old_data or {} - - # These are the keys that were added to the databag and triggered this event. - added = new_data.keys() - old_data.keys() - # These are the keys that were removed from the databag and triggered this event. - deleted = old_data.keys() - new_data.keys() - # These are the keys that already existed in the databag, - # but had their values changed. - changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} - # Return the diff with all possible changes. 
- return Diff(added, changed, deleted) - - -def resource_added(diff: Diff) -> bool: - """Ensures that one of the aliased resources has been added.""" - return any(item in diff.added for item in RESOURCE_ALIASES + ["resource"]) - - -def store_new_data( - relation: Relation, - component: Unit | Application, - new_data: dict[str, str], - short_uuid: str | None = None, -): - """Stores the new data in the databag for diff computation.""" - # First, the case for V0 - if not short_uuid: - relation.data[component].update({"data": json.dumps(new_data)}) - # Then the case for V1, where we have a ShortUUID - else: - data = json.loads(relation.data[component].get("data", "{}")) - if not isinstance(data, dict): - raise ValueError - newest_data = copy.deepcopy(data) - newest_data[short_uuid] = new_data - relation.data[component].update({"data": json.dumps(newest_data)}) - - -############################################################################## -# Helper classes -############################################################################## - -SecretGroup = NewType("SecretGroup", str) - - -SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) - - -class SecretBool(_SecretField[bool]): - """Class for booleans as secrets.""" - - _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() - _error_kind: ClassVar[str] = "bool_type" - - def _display(self) -> str: - return "****" - - -OptionalSecretStr: TypeAlias = SecretStr | None -OptionalSecretBool: TypeAlias = SecretBool | None - -OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) - -UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "user"] -TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "tls"] -TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), "tls"] -MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mtls"] -ExtraSecretStr = Annotated[OptionalSecretStr, 
Field(exclude=True, default=None), "extra"] -EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "entity"] - - -class Scope(Enum): - """Peer relations scope.""" - - APP = "app" - UNIT = "unit" - - -class CachedSecret: - """Locally cache a secret. - - The data structure is precisely reusing/simulating as in the actual Secret Storage - """ - - KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] - - def __init__( - self, - model: Model, - component: Application | Unit, - label: str, - secret_uri: str | None = None, - ): - self._secret_meta = None - self._secret_content = {} - self._secret_uri = secret_uri - self.label = label - self._model = model - self.component = component - self.current_label = None - - @property - def meta(self) -> Secret | None: - """Getting cached secret meta-information.""" - if not self._secret_meta: - if not (self._secret_uri or self.label): - return - - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - # Falling back to seeking for potential legacy labels - logger.info(f"Secret with label {self.label} not found") - - # If still not found, to be checked by URI, to be labelled with the proposed label - if not self._secret_meta and self._secret_uri: - self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) - return self._secret_meta - - ########################################################################## - # Public functions - ########################################################################## - - def add_secret( - self, - content: dict[str, str], - relation: Relation | None = None, - label: str | None = None, - ) -> Secret: - """Create a new secret.""" - if self._secret_uri: - raise SecretAlreadyExistsError( - "Secret is already defined with uri %s", self._secret_uri - ) - - label = self.label if not label else label - - secret = self.component.add_secret(content, label=label) - if 
relation and relation.app != self._model.app: - # If it's not a peer relation, grant is to be applied - secret.grant(relation) - self._secret_uri = secret.id - self._secret_meta = secret - return self._secret_meta - - def get_content(self) -> dict[str, str]: - """Getting cached secret content.""" - if not self._secret_content: - if self.meta: - try: - self._secret_content = self.meta.get_content(refresh=True) - except (ValueError, ModelError) as err: - # https://bugs.launchpad.net/juju/+bug/2042596 - # Only triggered when 'refresh' is set - if isinstance(err, ModelError) and not any( - msg in str(err) for msg in self.KNOWN_MODEL_ERRORS - ): - raise - # Due to: ValueError: Secret owner cannot use refresh=True - self._secret_content = self.meta.get_content() - return self._secret_content - - def set_content(self, content: dict[str, str]) -> None: - """Setting cached secret content.""" - if not self.meta: - return - - if content == self.get_content(): - return - - if content: - self.meta.set_content(content) - self._secret_content = content - else: - self.meta.remove_all_revisions() - - def get_info(self) -> SecretInfo | None: - """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" - if self.meta: - return self.meta.get_info() - - def remove(self) -> None: - """Remove secret.""" - if not self.meta: - raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") - try: - self.meta.remove_all_revisions() - except SecretNotFoundError: - pass - self._secret_content = {} - self._secret_meta = None - self._secret_uri = None - - -class SecretCache: - """A data structure storing CachedSecret objects.""" - - def __init__(self, model: Model, component: Application | Unit): - self._model = model - self.component = component - self._secrets: dict[str, CachedSecret] = {} - - def get(self, label: str, uri: str | None = None) -> CachedSecret | None: - """Getting a secret from Juju Secret store or cache.""" - if 
not self._secrets.get(label): - secret = CachedSecret(self._model, self.component, label, uri) - if secret.meta: - self._secrets[label] = secret - return self._secrets.get(label) - - def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: - """Adding a secret to Juju Secret.""" - if self._secrets.get(label): - raise SecretAlreadyExistsError(f"Secret {label} already exists") - - secret = CachedSecret(self._model, self.component, label) - secret.add_secret(content, relation) - self._secrets[label] = secret - return self._secrets[label] - - def remove(self, label: str) -> None: - """Remove a secret from the cache.""" - if secret := self.get(label): - try: - secret.remove() - self._secrets.pop(label) - except (SecretsUnavailableError, KeyError): - pass - else: - return - logging.debug("Non-existing Juju Secret was attempted to be removed %s", label) - - -############################################################################## -# Models classes -############################################################################## - - -class PeerModel(BaseModel): - """Common Model for all peer relations.""" - - model_config = ConfigDict( - validate_by_name=True, - validate_by_alias=True, - populate_by_name=True, - serialize_by_alias=True, - alias_generator=lambda x: x.replace("_", "-"), - extra="allow", - ) - - @model_validator(mode="after") - def extract_secrets(self, info: ValidationInfo): - """Extract all secret_fields into their local field.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing as we're lacking context here.") - return self - repository: AbstractRepository = info.context.get("repository") - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = SecretGroup(field_info.metadata[0]) - if not secret_group: - raise SecretsUnavailableError(field) - - 
aliased_field = field_info.serialization_alias or field - secret = repository.get_secret(secret_group, secret_uri=None) - - if not secret: - logger.info(f"No secret for group {secret_group}") - continue - - value = secret.get_content().get(aliased_field) - - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - value = SecretStr(value) - setattr(self, field, value) - - return self - - @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): - """Serializes the model writing the secrets in their respective secrets.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing serialization as we're lacking context here.") - return handler(self) - repository: AbstractRepository = info.context.get("repository") - - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = SecretGroup(field_info.metadata[0]) - if not secret_group: - raise SecretsUnavailableError(field) - - aliased_field = field_info.serialization_alias or field - secret = repository.get_secret(secret_group, secret_uri=None) - - value = getattr(self, field) - - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - - if secret is None: - if value: - secret = repository.add_secret( - aliased_field, - actual_value, - secret_group, - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - continue - - content = secret.get_content() - full_content = copy.deepcopy(content) - - if value is None: - full_content.pop(aliased_field, None) - else: - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - return handler(self) - - 
-class CommonModel(BaseModel): - """Common Model for both requirer and provider. - - request_id stores the request identifier for easier access. - resource is the requested resource. - """ - - model_config = ConfigDict( - validate_by_name=True, - validate_by_alias=True, - populate_by_name=True, - serialize_by_alias=True, - alias_generator=lambda x: x.replace("_", "-"), - extra="allow", - ) - - resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") - request_id: str | None = Field(default=None) - salt: str = Field( - description="This salt is used to create unique hashes even when other fields map 1-1", - default_factory=gen_salt, - ) - - @model_validator(mode="after") - def extract_secrets(self, info: ValidationInfo): - """Extract all secret_fields into their local field.""" - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing as we're lacking context here.") - return self - repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = field_info.metadata[0] - if not secret_group: - raise SecretsUnavailableError(field) - - aliased_field = field_info.serialization_alias or field - secret_field = repository.secret_field(secret_group, aliased_field).replace( - "-", "_" - ) - secret_uri: str | None = getattr(self, secret_field, None) - - if not secret_uri: - continue - - secret = repository.get_secret( - secret_group, secret_uri=secret_uri, short_uuid=short_uuid - ) - - if not secret: - logger.info(f"No secret for group {secret_group} and short uuid {short_uuid}") - continue - - value = secret.get_content().get(aliased_field) - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - 
value = SecretStr(value) - - setattr(self, field, value) - return self - - @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): - """Serializes the model writing the secrets in their respective secrets.""" - _encountered_secrets: set[tuple[CachedSecret, str]] = set() - if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): - logger.debug("No secret parsing serialization as we're lacking context here.") - return handler(self) - repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) - # Backward compatibility for v0 regarding secrets. - if info.context.get("version") == "v0": - short_uuid = None - - for field, field_info in self.__pydantic_fields__.items(): - if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: - secret_group = field_info.metadata[0] - if not secret_group: - raise SecretsUnavailableError(field) - aliased_field = field_info.serialization_alias or field - secret_field = repository.secret_field(secret_group, aliased_field).replace( - "-", "_" - ) - secret_uri: str | None = getattr(self, secret_field, None) - secret = repository.get_secret( - secret_group, secret_uri=secret_uri, short_uuid=short_uuid - ) - - value = getattr(self, field) - - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) - - if secret is None: - if value: - secret = repository.add_secret( - aliased_field, actual_value, secret_group, short_uuid - ) - if not secret or not secret.meta: - raise SecretError("No secret to send back") - setattr(self, secret_field, secret.meta.id) - continue - - content = secret.get_content() - full_content = copy.deepcopy(content) - - if value is None: - full_content.pop(aliased_field, None) - 
_encountered_secrets.add((secret, secret_field)) - else: - full_content.update({aliased_field: actual_value}) - secret.set_content(full_content) - - # Delete all empty secrets and clean up their fields. - for secret, secret_field in _encountered_secrets: - if not secret.get_content(): - # Setting a field to '' deletes it - setattr(self, secret_field, "") - repository.delete_secret(secret.label) - - return handler(self) - - @classmethod - def _get_secret_field(cls, field: str) -> SecretGroup | None: - """Checks if the field is a secret uri or not.""" - if not field.startswith(SECRET_PREFIX): - return None - - value = field.split("-")[1] - if info := cls.__pydantic_fields__.get(field.replace("-", "_")): - if info.annotation == SecretString: - return SecretGroup(value) - return None - - -class EntityPermissionModel(BaseModel): - """Entity Permissions Model.""" - - resource_name: str - resource_type: str - privileges: list - - -class RequirerCommonModel(CommonModel): - """Requirer side of the request model. - - extra_user_roles is used to request more roles for that user. - external_node_connectivity is used to indicate that the URI should be made for external clients when True - """ - - extra_user_roles: str | None = Field(default=None) - extra_group_roles: str | None = Field(default=None) - external_node_connectivity: bool = Field(default=False) - entity_type: Literal["USER", "GROUP"] | None = Field(default=None) - entity_permissions: list[EntityPermissionModel] | None = Field(default=None) - secret_mtls: SecretString | None = Field(default=None) - mtls_cert: MtlsSecretStr = Field(default=None) - - @model_validator(mode="after") - def validate_fields(self): - """Validates that no inconsistent request is being sent.""" - if self.entity_type and self.entity_type not in ["USER", "GROUP"]: - raise ValueError("Invalid entity-type. 
Possible values are USER and GROUP") - - if self.entity_type == "USER" and self.extra_group_roles: - raise ValueError("Inconsistent entity information. Use extra_user_roles instead") - - if self.entity_type == "GROUP" and self.extra_user_roles: - raise ValueError("Inconsistent entity information. Use extra_group_roles instead") - - return self - - -class ProviderCommonModel(CommonModel): - """Serialized fields added to the databag. - - endpoints stores the endpoints exposed to that client. - secret_user is a secret URI mapping to the user credentials - secret_tls is a secret URI mapping to the TLS certificate - secret_extra is a secret URI for all additional secrets requested. - """ - - endpoints: str | None = Field(default=None) - read_only_endpoints: str | None = Field(default=None) - secret_user: SecretString | None = Field(default=None) - secret_tls: SecretString | None = Field(default=None) - secret_extra: SecretString | None = Field(default=None) - secret_entity: SecretString | None = Field(default=None) - - -class ResourceProviderModel(ProviderCommonModel): - """Extended model including the deserialized fields.""" - - username: UserSecretStr = Field(default=None) - password: UserSecretStr = Field(default=None) - uris: UserSecretStr = Field(default=None) - read_only_uris: UserSecretStr = Field(default=None) - tls: TlsSecretBool = Field(default=None) - tls_ca: TlsSecretStr = Field(default=None) - entity_name: EntitySecretStr = Field(default=None) - entity_password: EntitySecretStr = Field(default=None) - version: str | None = Field(default=None) - - -class RequirerDataContractV0(RequirerCommonModel): - """Backward compatibility.""" - - version: Literal["v0"] = Field(default="v0") - - original_field: str = Field(exclude=True, default="") - - @model_validator(mode="before") - @classmethod - def ensure_original_field(cls, data: Any): - """Ensures that we keep the original field.""" - if isinstance(data, dict): - for alias in RESOURCE_ALIASES: - if data.get(alias) 
is not None: - data["original_field"] = alias - break - else: - for alias in RESOURCE_ALIASES: - if getattr(data, alias) is not None: - data.original_field = alias - return data - - -TResourceProviderModel = TypeVar("TResourceProviderModel", bound=ResourceProviderModel) -TRequirerCommonModel = TypeVar("TRequirerCommonModel", bound=RequirerCommonModel) - - -class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): - """The new Data Contract.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TRequirerCommonModel] - - -def discriminate_on_version(payload: Any) -> str: - """Use the version to discriminate.""" - if isinstance(payload, dict): - return payload.get("version", "v0") - return getattr(payload, "version", "v0") - - -RequirerDataContractType = Annotated[ - Annotated[RequirerDataContractV0, Tag("v0")] | Annotated[RequirerDataContractV1, Tag("v1")], - Discriminator(discriminate_on_version), -] - - -RequirerDataContract = TypeAdapter(RequirerDataContractType) - - -class DataContractV0(ResourceProviderModel): - """The Data contract of the response, for V0.""" - - -class DataContractV1(BaseModel, Generic[TResourceProviderModel]): - """The Data contract of the response, for V1.""" - - version: Literal["v1"] = Field(default="v1") - requests: list[TResourceProviderModel] = Field(default_factory=list) - - -DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) - - -TCommonModel = TypeVar("TCommonModel", bound=CommonModel) - - -def is_topic_value_acceptable(value: str | None) -> str | None: - """Check whether the given Kafka topic value is acceptable.""" - if value and "*" in value[:3]: - raise ValueError(f"Error on topic '{value}',, unacceptable value.") - return value - - -class KafkaRequestModel(RequirerCommonModel): - """Specialised model for Kafka.""" - - consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( - Field(default=None) - ) - - -class KafkaResponseModel(ResourceProviderModel): 
- """Kafka response model.""" - - consumer_group_prefix: ExtraSecretStr = Field(default=None) - zookeeper_uris: ExtraSecretStr = Field(default=None) - - -############################################################################## -# AbstractRepository class -############################################################################## - - -class AbstractRepository(ABC): - """Abstract repository interface.""" - - @abstractmethod - def get_secret( - self, secret_group, secret_uri: str | None, short_uuid: str | None = None - ) -> CachedSecret | None: - """Gets a secret from the secret cache by uri or label.""" - ... - - @abstractmethod - def get_secret_field( - self, - field: str, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> str | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def get_field(self, field: str) -> str | None: - """Gets the value for one field.""" - ... - - @abstractmethod - def get_fields(self, *fields: str) -> dict[str, str | None]: - """Gets the values for all provided fields.""" - ... - - @abstractmethod - def write_field(self, field: str, value: Any) -> None: - """Writes the value in the field, without any secret support.""" - ... - - @abstractmethod - def write_fields(self, mapping: dict[str, Any]) -> None: - """Writes the values of mapping in the fields without any secret support (keys of mapping).""" - ... - - def write_secret_field( - self, field: str, value: Any, group: SecretGroup - ) -> CachedSecret | None: - """Writes a secret field.""" - ... - - @abstractmethod - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - """Gets a value for a field stored in a secret group.""" - ... - - @abstractmethod - def delete_secret(self, label: str): - """Deletes a secret by its label.""" - ... - - @abstractmethod - def delete_field(self, field: str) -> None: - """Deletes a field.""" - ... 
- - @abstractmethod - def delete_fields(self, *fields: str) -> None: - """Deletes all the provided fields.""" - ... - - @abstractmethod - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - """Delete a field stored in a secret group.""" - ... - - @abstractmethod - def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: - """Registers a secret using the repository.""" - ... - - @abstractmethod - def get_data(self) -> dict[str, Any] | None: - """Gets the whole data.""" - ... - - @abstractmethod - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Builds a secret field.""" - - -class OpsRepository(AbstractRepository): - """Implementation for ops repositories, with some methods left out.""" - - SECRET_FIELD_NAME: str - - IGNORES_GROUPS: list[SecretGroup] = [] - - uri_to_databag: bool = True - - def __init__( - self, - model: Model, - relation: Relation | None, - component: Unit | Application, - ): - self._local_app = model.app - self._local_unit = model.unit - self.relation = relation - self.component = component - self.model = model - self.secrets = SecretCache(model, component) - - @abstractmethod - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None - ) -> str: - """Generate unique group mapping for secrets within a relation context.""" - ... 
- - @override - def get_data(self) -> dict[str, Any] | None: - ret: dict[str, Any] = {} - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - for key, value in self.relation.data[self.component].items(): - try: - ret[key] = json.loads(value) - except json.JSONDecodeError: - ret[key] = value - - return ret - - @override - @ensure_leader_for_app - def get_field( - self, - field: str, - ) -> str | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - return relation_data.get(field) - - @override - @ensure_leader_for_app - def get_fields(self, *fields: str) -> dict[str, str]: - res = {} - for field in fields: - if (value := self.get_field(field)) is not None: - res[field] = value - return res - - @override - @ensure_leader_for_app - def write_field(self, field: str, value: Any) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - if not value: - return None - self.relation.data[self.component].update({field: value}) - - @override - @ensure_leader_for_app - def write_fields(self, mapping: dict[str, Any]) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - (self.write_field(field, value) for field, value in mapping.items()) - - @override - @ensure_leader_for_app - def write_secret_field( - self, field: str, value: Any, 
secret_group: SecretGroup - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = self.get_field(self.secret_field(secret_group, field)) - - secret = self.secrets.get(label=label, uri=secret_uri) - if not secret: - return self.add_secret(field, value, secret_group) - else: - content = secret.get_content() - full_content = copy.deepcopy(content) - full_content.update({field: value}) - secret.set_content(full_content) - return secret - - @override - @ensure_leader_for_app - def delete_field(self, field: str) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - relation_data = self.relation.data[self.component] - try: - relation_data.pop(field) - except KeyError: - logger.debug( - f"Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})" - ) - - @override - @ensure_leader_for_app - def delete_fields(self, *fields: str) -> None: - (self.delete_field(field) for field in fields) - - @override - @ensure_leader_for_app - def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: - if not self.relation: - logger.info("No relation to get value from") - return None - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - relation_data = self.relation.data[self.component] - secret_field = self.secret_field(secret_group, field) - - label = self._generate_secret_label(self.relation, secret_group) - secret_uri = relation_data.get(secret_field) - - secret = 
self.secrets.get(label=label, uri=secret_uri) - - if not secret: - logging.error(f"Can't delete secret for relation {self.relation.id}") - return None - - content = secret.get_content() - new_content = copy.deepcopy(content) - try: - new_content.pop(field) - except KeyError: - logging.debug( - f"Non-existing secret '{field}' was attempted to be removed" - f"from relation {self.relation.id} and group {secret_group}" - ) - - # Write the new secret content if necessary - if new_content: - secret.set_content(new_content) - return - - # Remove the secret from the relation if it's fully gone. - try: - relation_data.pop(field) - except KeyError: - pass - self.secrets.remove(label) - return - - @ensure_leader_for_app - def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): - """Registers the secret group for this relation. - - [MAGIC HERE] - If we fetch a secret using get_secret(id=, label=), - then will be "stuck" on the Secret object, whenever it may - appear (i.e. as an event attribute, or fetched manually) on future occasions. - - This will allow us to uniquely identify the secret on Provider side (typically on - 'secret-changed' events), and map it to the corresponding relation. 
        """
        if not self.relation:
            raise ValueError("Cannot register without relation.")

        label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid)
        # Accessing .meta pins the (uri, label) pair inside Juju so future
        # secret-changed events can be mapped back to this relation.
        CachedSecret(self.model, self.component, label, uri).meta

    @override
    def get_secret(
        self, secret_group: SecretGroup, secret_uri: str | None, short_uuid: str | None = None
    ) -> CachedSecret | None:
        """Gets a secret from the secret cache by uri or label."""
        if not self.relation:
            logger.info("No relation to get value from")
            return None
        if self.component not in self.relation.data:
            logger.info(f"Component {self.component} not in relation {self.relation}")
            return None
        if secret_group in self.IGNORES_GROUPS:
            logger.warning(f"Trying to get invalid secret group {secret_group}")
            return None

        label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid)

        return self.secrets.get(label=label, uri=secret_uri)

    # NOTE(review): the `uri` parameter is not part of the AbstractRepository
    # signature for this method — confirm the intended interface.
    @override
    def get_secret_field(
        self,
        field: str,
        secret_group: SecretGroup,
        uri: str | None = None,
        short_uuid: str | None = None,
    ) -> Any | None:
        """Gets a value for a field stored in a secret group."""
        if not self.relation:
            logger.info("No relation to get value from")
            return None
        if self.component not in self.relation.data:
            logger.info(f"Component {self.component} not in relation {self.relation}")
            return None

        if secret_group in self.IGNORES_GROUPS:
            logger.warning(f"Trying to get invalid secret group {secret_group}")
            return None

        secret_field = self.secret_field(secret_group, field)

        # Prefer an explicitly-provided URI, otherwise look it up in the databag.
        relation_data = self.relation.data[self.component]
        secret_uri = uri or relation_data.get(secret_field)
        label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid)

        # When URIs are mirrored into the databag, a missing URI means no secret.
        if self.uri_to_databag and not secret_uri:
            logger.info(f"No secret for group {secret_group} in relation {self.relation}")
            return None

        secret = self.secrets.get(label=label, uri=secret_uri)

        if not secret:
logger.info(f"No secret for group {secret_group} in relation {self.relation}") - return None - - content = secret.get_content().get(field) - - if not content: - return - - try: - return json.loads(content) - except json.JSONDecodeError: - return content - - @override - @ensure_leader_for_app - def add_secret( - self, - field: str, - value: Any, - secret_group: SecretGroup, - short_uuid: str | None = None, - ) -> CachedSecret | None: - if not self.relation: - logger.info("No relation to get value from") - return None - - if self.component not in self.relation.data: - logger.info(f"Component {self.component} not in relation {self.relation}") - return None - - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - - label = self._generate_secret_label(self.relation, secret_group, short_uuid) - - secret = self.secrets.add(label, {field: value}, self.relation) - - if not secret.meta or not secret.meta.id: - logging.error("Secret is missing Secret ID") - raise SecretError("Secret added but is missing Secret ID") - - return secret - - @override - @ensure_leader_for_app - def delete_secret(self, label: str) -> None: - self.secrets.remove(label) - - -@final -class OpsRelationRepository(OpsRepository): - """Implementation of the Abstract Repository for non peer relations.""" - - SECRET_FIELD_NAME: str = "secret" - - @override - def _generate_secret_label( - self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None - ) -> str: - """Generate unique group_mappings for secrets within a relation context.""" - if short_uuid: - return f"{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret" - return f"{relation.name}.{relation.id}.{secret_group}.secret" - - def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: - """Generates the field name to store in the peer relation.""" - return f"{self.SECRET_FIELD_NAME}-{secret_group}" - - -class 
class OpsPeerRepository(OpsRepository):
    """Implementation of the Ops Repository for peer relations."""

    SECRET_FIELD_NAME = "internal_secret"

    # Peer relations never expose client-facing secret groups.
    IGNORES_GROUPS = [
        SecretGroup("user"),
        SecretGroup("entity"),
        SecretGroup("mtls"),
        SecretGroup("tls"),
    ]

    # Peer secrets are found via label only; URIs are not mirrored in the databag.
    uri_to_databag: bool = False

    @property
    def scope(self) -> Scope:
        """Returns a scope derived from the component type (app vs unit)."""
        if isinstance(self.component, Application):
            return Scope.APP
        if isinstance(self.component, Unit):
            return Scope.UNIT
        raise ValueError("Invalid component, neither a Unit nor an Application")

    @override
    def _generate_secret_label(
        self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> str:
        """Generate unique group_mappings for secrets within a relation context."""
        members = [relation.name, self._local_app.name, self.scope.value]

        # The "extra" group is the default and is left out of the label.
        if secret_group != SecretGroup("extra"):
            members.append(secret_group)
        return f"{'.'.join(members)}"

    def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str:
        """Generates the field name to store in the peer relation."""
        if not field:
            raise ValueError("Must have a field.")
        return f"{field}@{secret_group}"


@final
class OpsPeerUnitRepository(OpsPeerRepository):
    """Implementation for a unit."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        super().__init__(model, relation, component)


@final
class OpsOtherPeerUnitRepository(OpsPeerRepository):
    """Implementation for a remote unit (read-only by construction)."""

    @override
    def __init__(self, model: Model, relation: Relation | None, component: Unit):
        if component == model.unit:
            raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.")
        super().__init__(model, relation, component)

    @override
    def write_field(self, field: str, value: Any) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def write_fields(self, mapping: dict[str, Any]) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def add_secret(
        self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None
    ) -> CachedSecret | None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_field(self, field: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_fields(self, *fields: str) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")

    @override
    def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None:
        raise NotImplementedError("It's not possible to update data of another unit.")


TRepository = TypeVar("TRepository", bound=OpsRepository)
TCommon = TypeVar("TCommon", bound=BaseModel)
TPeerCommon = TypeVar("TPeerCommon", bound=PeerModel)
TCommonBis = TypeVar("TCommonBis", bound=BaseModel)


class RepositoryInterface(Generic[TRepository, TCommon]):
    """Repository builder.

    Binds a charm, a relation name and a component to a repository type, and
    builds/writes models for any relation id under that name.
    """

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        component: Unit | Application,
        repository_type: type[TRepository],
        model: type[TCommon] | TypeAdapter | None,
    ):
        self.charm = charm
        self._model = charm.model
        self.repository_type = repository_type
        self.relation_name = relation_name
        self.model = model
        self.component = component

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this relation name."""
        return self._model.relations[self.relation_name]

    def repository(
        self, relation_id: int, component: Unit | Application | None = None
    ) -> TRepository:
        """Returns a repository for the relation."""
        relation = self._model.get_relation(self.relation_name, relation_id)
        if not relation:
            raise ValueError("Missing relation.")
        return self.repository_type(self._model, relation, 
component or self.component) - - @overload - def build_model( - self, - relation_id: int, - model: type[TCommonBis], - component: Unit | Application | None = None, - ) -> TCommonBis: ... - - @overload - def build_model( - self, - relation_id: int, - model: type[TCommon], - component: Unit | Application | None = None, - ) -> TCommon: ... - - @overload - def build_model( - self, - relation_id: int, - model: TypeAdapter[TCommonBis], - component: Unit | Application | None = None, - ) -> TCommonBis: ... - - @overload - def build_model( - self, - relation_id: int, - model: None = None, - component: Unit | Application | None = None, - ) -> TCommon: ... - - def build_model( - self, - relation_id: int, - model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, - component: Unit | Application | None = None, - ) -> TCommon | TCommonBis: - """Builds a model using the repository for that relation.""" - model = model or self.model # First the provided model (allows for specialisation) - component = component or self.component - if not model: - raise ValueError("Missing model to specialise data") - relation = self._model.get_relation(self.relation_name, relation_id) - if not relation: - raise ValueError("Missing relation.") - return build_model(self.repository_type(self._model, relation, component), model) - - def write_model( - self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None - ): - """Writes the model using the repository.""" - relation = self._model.get_relation(self.relation_name, relation_id) - if not relation: - raise ValueError("Missing relation.") - - write_model( - self.repository_type(self._model, relation, self.component), model, context=context - ) - - -class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): - """Specialised Interface to build repositories for app peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - model: type[TCommon] | TypeAdapter | None = 
None, - ): - super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) - - -class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): - """Specialised Interface to build repositories for app peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - model: type[TPeerCommon] | TypeAdapter | None = None, - ): - super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) - - -class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): - """Specialised Interface to build repositories for this unit peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - model: type[TPeerCommon] | TypeAdapter | None = None, - ): - super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) - - -class OpsOtherPeerUnitRepositoryInterface( - RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] -): - """Specialised Interface to build repositories for another unit peer relations.""" - - def __init__( - self, - charm: CharmBase, - relation_name: str, - unit: Unit, - model: type[TPeerCommon] | TypeAdapter | None = None, - ): - super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) - - -############################################################################## -# DDD implementation methods -############################################################################## -############################################################################## - - -def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: - """Builds a common model using the provided repository and provided model structure.""" - data = repository.get_data() or {} - - data.pop("data", None) - - # Beware this means all fields should have a default value here. 
- if isinstance(model, TypeAdapter): - return model.validate_python(data, context={"repository": repository}) - - return model.model_validate(data, context={"repository": repository}) - - -def write_model( - repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None -): - """Writes the data stored in the model using the repository object.""" - context = context or {} - dumped = model.model_dump( - mode="json", context={"repository": repository} | context, exclude_none=False - ) - for field, value in dumped.items(): - if value is None: - repository.delete_field(field) - continue - dumped_value = value if isinstance(value, str) else json.dumps(value) - repository.write_field(field, dumped_value) - - -############################################################################## -# Custom Events -############################################################################## - - -class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): - """Resource requested event. - - Contains the request that should be handled. - - fields to serialize: relation, app, unit, request - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - request: TRequirerCommonModel, - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.request = request - - def snapshot(self) -> dict[str, Any]: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. 
- snapshot["request"] = pickle.dumps(self.request) - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.request = pickle.loads(snapshot["request"]) - - -class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource requested event.""" - - pass - - -class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource Entity requested event.""" - - pass - - -class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - pass - - -class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): - """Resource entity permissions changed event.""" - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - request: TRequirerCommonModel, - old_mtls_cert: str | None = None, - ): - super().__init__(handle, relation, app, unit, request) - - self.old_mtls_cert = old_mtls_cert - - def snapshot(self): - """Return a snapshot of the event.""" - return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} - - def restore(self, snapshot): - """Restore the event from a snapshot.""" - super().restore(snapshot) - self.old_mtls_cert = snapshot["old_mtls_cert"] - - -class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): - """Resource requested event. - - Contains the request that should be handled. 
- - fields to serialize: relation, app, unit, request - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - requests: list[TRequirerCommonModel], - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.requests = requests - - def snapshot(self) -> dict[str, Any]: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. - snapshot["requests"] = [pickle.dumps(request) for request in self.requests] - return snapshot - - def restore(self, snapshot: dict[str, Any]): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - self.requests = [pickle.loads(request) for request in snapshot["requests"]] - - -class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) - resource_requested = EventSource(ResourceRequestedEvent) - resource_entity_requested = EventSource(ResourceEntityRequestedEvent) - resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) - mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) - - -class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): - """Resource created/changed event. - - Contains the request that should be handled. - - fields to serialize: relation, app, unit, response - """ - - def __init__( - self, - handle: Handle, - relation: Relation, - app: Application | None, - unit: Unit | None, - response: TResourceProviderModel, - ): - super().__init__(handle) - self.relation = relation - self.app = app - self.unit = unit - self.response = response - - def snapshot(self) -> dict: - """Save the event information.""" - snapshot = {"relation_name": self.relation.name, "relation_id": self.relation.id} - if self.app: - snapshot["app_name"] = self.app.name - if self.unit: - snapshot["unit_name"] = self.unit.name - # The models are too complex and would be blocked by marshal so we pickle dump the model. - # The full dictionary is pickled afterwards anyway. 
- snapshot["response"] = pickle.dumps(self.response) - return snapshot - - def restore(self, snapshot: dict): - """Restore event information.""" - relation = self.framework.model.get_relation( - snapshot["relation_name"], snapshot["relation_id"] - ) - if not relation: - raise ValueError("Missing relation") - self.relation = relation - self.app = None - app_name = snapshot.get("app_name") - if app_name: - self.app = self.framework.model.get_app(app_name) - self.unit = None - unit_name = snapshot.get("unit_name") - if unit_name: - self.app = self.framework.model.get_app(unit_name) - - self.response = pickle.loads(snapshot["response"]) - - -class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource has been created.""" - - pass - - -class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Resource entity has been created.""" - - pass - - -class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read/Write enpoints are changed.""" - - pass - - -class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read-only enpoints are changed.""" - - pass - - -class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Authentication was updated for a user.""" - - pass - - -class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): - """Database events. - - This class defines the events that the database can emit. 
- """ - - resource_created = EventSource(ResourceCreatedEvent) - resource_entity_created = EventSource(ResourceEntityCreatedEvent) - endpoints_changed = EventSource(ResourceEndpointsChangedEvent) - read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) - authentication_updated = EventSource(AuthenticationUpdatedEvent) - - -############################################################################## -# Event Handlers -############################################################################## - - -class EventHandlers(Object): - """Requires-side of the relation.""" - - component: Application | Unit - interface: RepositoryInterface - - def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): - """Manager of base client relations.""" - if not unique_key: - unique_key = relation_name - super().__init__(charm, unique_key) - - self.charm = charm - self.relation_name = relation_name - - self.framework.observe( - charm.on[self.relation_name].relation_changed, - self._on_relation_changed_event, - ) - - self.framework.observe( - self.charm.on[self.relation_name].relation_created, - self._on_relation_created_event, - ) - - self.framework.observe( - charm.on.secret_changed, - self._on_secret_changed_event, - ) - - @property - def relations(self) -> list[Relation]: - """Shortcut to get access to the relations.""" - return self.interface.relations - - # Event handlers - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the relation is created.""" - pass - - @abstractmethod - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - """Event emitted when the relation data has changed.""" - raise NotImplementedError - - @abstractmethod - def _handle_event( - self, - ): - """Handles 
the event and reacts accordingly.""" - pass - - def compute_diff( - self, - relation: Relation, - request: RequirerCommonModel | ResourceProviderModel, - repository: AbstractRepository | None = None, - store: bool = True, - ) -> Diff: - """Computes, stores and returns a diff for that request.""" - if not repository: - repository = OpsRelationRepository(self.model, relation, component=relation.app) - - # Gets the data stored in the databag for diff computation - old_data = get_encoded_dict(relation, self.component, "data") - - # In case we're V1, we select specifically this request - if old_data and request.request_id: - old_data: dict | None = old_data.get(request.request_id, None) - - # dump the data of the current request so we can compare - new_data = request.model_dump( - mode="json", - exclude={"data"}, - exclude_none=True, - exclude_defaults=True, - ) - - # Computes the diff - _diff = diff(old_data, new_data) - - if store: - # Update the databag with the new data for later diff computations - store_new_data(relation, self.component, new_data, short_uuid=request.request_id) - - return _diff - - def _relation_from_secret_label(self, secret_label: str) -> Relation | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 3): - return - - try: - relation_id = int(contents[1]) - except ValueError: - return - - relation_name = contents[0] - - try: - return self.model.get_relation(relation_name, relation_id) - except ModelError: - return - - def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: - """Retrieve the relation that belongs to a secret label.""" - contents = secret_label.split(".") - - if not (contents and len(contents) >= 5): - return - - return contents[2] - - -class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): - """Event Handler for resource provider.""" - - on = ResourceProvidesEvents[TRequirerCommonModel]() # 
type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - request_model: type[TRequirerCommonModel], - unique_key: str = "", - mtls_enabled: bool = False, - bulk_event: bool = False, - ): - """Builds a resource provider event handler. - - Args: - charm: The charm. - relation_name: The relation name this event handler is listening to. - request_model: The request model that is expected to be received. - unique_key: An optional unique key for that object. - mtls_enabled: If True, means the server supports MTLS integration. - bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. - """ - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.app - self.request_model = request_model - self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) - self.mtls_enabled = mtls_enabled - self.bulk_event = bulk_event - - @staticmethod - def _validate_diff(event: RelationEvent, _diff: Diff) -> None: - """Validates that entity information is not changed after relation is established. - - - When entity-type changes, backwards compatibility is broken. - - When extra-user-roles changes, role membership checks become incredibly complex. - - When extra-group-roles changes, role membership checks become incredibly complex. 
- """ - if not isinstance(event, RelationChangedEvent): - return - - for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: - if key in _diff.changed: - raise ValueError(f"Cannot change {key} after relation has already been created") - - def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): - if self.mtls_enabled and "secret-mtls" in _diff.added: - getattr(self.on, "mtls_cert_updated").emit( - event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None - ) - return - # Emit a resource requested event if the setup key (resource name) - # was added to the relation databag, but the entity-type key was not. - if resource_added(_diff) and "entity-type" not in _diff.added: - getattr(self.on, "resource_requested").emit( - event.relation, - app=event.app, - unit=event.unit, - request=request, - ) - # To avoid unnecessary application restarts do not trigger other events. - return - - # Emit an entity requested event if the setup key (resource name) - # was added to the relation databag, in addition to the entity-type key. - if resource_added(_diff) and "entity-type" in _diff.added: - getattr(self.on, "resource_entity_requested").emit( - event.relation, - app=event.app, - unit=event.unit, - request=request, - ) - # To avoid unnecessary application restarts do not trigger other events. - return - - # Emit a permissions changed event if the setup key (resource name) - # was added to the relation databag, and the entity-permissions key changed. - if ( - not resource_added(_diff) - and "entity-type" not in _diff.added - and ("entity-permissions" in _diff.added or "entity-permissions" in _diff.changed) - ): - getattr(self.on, "resource_entity_permissions_changed").emit( - event.relation, app=event.app, unit=event.unit, request=request - ) - # To avoid unnecessary application restarts do not trigger other events. 
- return - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request: RequirerCommonModel, - ): - _diff = self.compute_diff(event.relation, request, repository) - - self._validate_diff(event, _diff) - self._dispatch_events(event, _diff, request) - - def _handle_bulk_event( - self, - event: RelationChangedEvent, - repository: AbstractRepository, - request_model: RequirerDataContractV1[TRequirerCommonModel], - ): - """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. - - This allows for the developer to process the diff and store it themselves - """ - for request in request_model.requests: - # Compute the diff without storing it so we can validate the diffs. - _diff = self.compute_diff(event.relation, request, repository, store=False) - self._validate_diff(event, _diff) - - getattr(self.on, "bulk_resources_requested").emit( - event.relation, app=event.app, unit=event.unit, requests=request_model.requests - ) - - # Store all the diffs if they were not already stored. 
- for request in request_model.requests: - new_data = request.model_dump( - mode="json", - exclude={"data"}, - context={"repository": repository}, - exclude_none=True, - exclude_defaults=True, - ) - store_new_data(event.relation, self.component, new_data, request.request_id) - - @override - def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: - if not self.mtls_enabled: - logger.info("MTLS is disabled, exiting early.") - return - if not event.secret.label: - return - - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = unit - break - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - old_mtls_cert = event.secret.get_content().get("mtls-cert") - logger.info("mtls-cert-updated") - - # V0, just fire the event. - if version == "v0": - request = build_model(repository, RequirerDataContractV0) - # V1, find the corresponding request. 
- else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if not short_uuid: - return - for _request in request_model.requests: - if _request.request_id == short_uuid: - request = _request - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "mtls_cert_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - request=request, - mtls_cert=old_mtls_cert, - ) - - @override - def _on_relation_changed_event(self, event: RelationChangedEvent): - if not self.charm.unit.is_leader(): - return - - repository = OpsRelationRepository( - self.model, event.relation, component=event.relation.app - ) - - # Don't do anything until we get some data - if not repository.get_data(): - return - - version = repository.get_field("version") or "v0" - if version == "v0": - request_model = build_model(repository, RequirerDataContractV0) - old_name = request_model.original_field - request_model.request_id = None # For safety, let's ensure that we don't have a model. - self._handle_event(event, repository, request_model) - logger.info( - f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" - ) - self.interface.repository( - event.relation.id, - ).write_field(old_name, request_model.resource) - else: - request_model = build_model(repository, RequirerDataContractV1[self.request_model]) - if self.bulk_event: - self._handle_bulk_event(event, repository, request_model) - return - for request in request_model.requests: - self._handle_event(event, repository, request) - - def set_response(self, relation_id: int, response: ResourceProviderModel): - r"""Sets a response in the databag. - - This function will react accordingly to the version number. - If the version number is v0, then we write the data directly in the databag. - If the version number is v1, then we write the data in the list of responses. - - /!\ This function updates a response if it was already present in the databag! 
- - Args: - relation_id: The specific relation id for that event. - response: The response to write in the databag. - """ - if not self.charm.unit.is_leader(): - return - - relation = self.charm.model.get_relation(self.relation_name, relation_id) - - if not relation: - raise ValueError("Missing relation.") - - repository = OpsRelationRepository(self.model, relation, component=relation.app) - version = repository.get_field("version") or "v0" - - if version == "v0": - # Ensure the request_id is None - response.request_id = None - self.interface.write_model( - relation_id, response, context={"version": "v0"} - ) # {"database": "database-name", "secret-user": "uri", ...} - return - - model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) - - # for/else syntax allows to execute the else if break was not called. - # This allows us to update or append easily. - for index, _response in enumerate(model.requests): - if _response.request_id == response.request_id: - model.requests[index] = response - break - else: - model.requests.append(response) - - self.interface.write_model(relation_id, model) - return - - -class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): - """Event Handler for resource requirer.""" - - on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] - - def __init__( - self, - charm: CharmBase, - relation_name: str, - requests: list[RequirerCommonModel], - response_model: type[TResourceProviderModel], - unique_key: str = "", - relation_aliases: list[str] | None = None, - ): - super().__init__(charm, relation_name, unique_key) - self.component = self.charm.unit - self.relation_aliases = relation_aliases - self._requests = requests - self.response_model = DataContractV1[response_model] - self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( - OpsRelationRepositoryInterface(charm, relation_name, self.response_model) - ) - - if 
requests: - self._request_model = requests[0].__class__ - else: - self._request_model = RequirerCommonModel - - # First, check that the number of aliases matches the one defined in charm metadata. - if self.relation_aliases: - relation_connection_limit = self.charm.meta.requires[relation_name].limit - if len(self.relation_aliases) != relation_connection_limit: - raise ValueError( - f"Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}" - ) - - # Created custom event names for each alias. - if self.relation_aliases: - for relation_alias in self.relation_aliases: - self.on.define_event( - f"{relation_alias}_resource_created", - ResourceCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_resource_entity_created", - ResourceEntityCreatedEvent, - ) - self.on.define_event( - f"{relation_alias}_endpoints_changed", - ResourceEndpointsChangedEvent, - ) - self.on.define_event( - f"{relation_alias}_read_only_endpoints_changed", - ResourceReadOnlyEndpointsChangedEvent, - ) - - ############################################################################## - # Extra useful functions - ############################################################################## - def is_resource_created( - self, - rel_id: int, - request_id: str, - model: DataContractV1[TResourceProviderModel] | None = None, - ) -> bool: - """Checks if a resource has been created or not. - - Args: - rel_id: The relation id to check. - request_id: The specific request id to check. - model: An optional model to use (for performances). 
- """ - if not model: - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - for request in model.requests: - if request.request_id == request_id: - return request.secret_user is not None or request.secret_entity is not None - return False - - def are_all_resources_created(self, rel_id: int) -> bool: - """Checks that all resources have been created for a relation. - - Args: - rel_id: The relation id to check. - """ - relation = self.model.get_relation(self.relation_name, rel_id) - if not relation: - return False - model = self.interface.build_model(relation_id=rel_id, component=relation.app) - return all( - self.is_resource_created(rel_id, request.request_id, model) - for request in model.requests - if request.request_id - ) - - @staticmethod - def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: - # Actual checking method. - # No need to check for psycopg here, it's been checked before. - if not psycopg2: - return False - - try: - with psycopg2.connect(connection_string) as connection: - with connection.cursor() as cursor: - cursor.execute( - "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) - ) - return cursor.fetchone() is not None - except psycopg2.Error as e: - logger.exception( - f"failed to check whether {plugin} plugin is enabled in the database: %s", - str(e), - ) - return False - - def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: - """Returns whether a plugin is enabled in the database. - - Args: - plugin: name of the plugin to check. - relation_index: Optional index to check the database (default: 0 - first relation). - """ - if not psycopg2: - return False - - # Can't check a non existing relation. 
- if len(self.relations) <= relation_index: - return False - - relation = self.relations[relation_index] - model = self.interface.build_model(relation_id=relation.id, component=relation.app) - for request in model.requests: - if request.endpoints and request.username and request.password: - host = request.endpoints.split(":")[0] - username = request.username.get_secret_value() - password = request.password.get_secret_value() - - connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" - return self._is_pg_plugin_enabled(plugin, connection_string) - logger.info("No valid request to use to check for plugin.") - return False - - ############################################################################## - # Helpers for aliases - ############################################################################## - - def _assign_relation_alias(self, relation_id: int) -> None: - """Assigns an alias to a relation. - - This function writes in the unit data bag. - - Args: - relation_id: the identifier for a particular relation. - """ - # If no aliases were provided, return immediately. - if not self.relation_aliases: - return - - # Return if an alias was already assigned to this relation - # (like when there are more than one unit joining the relation). - relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation and relation.data[self.charm.unit].get("alias"): - return - - # Retrieve the available aliases (the ones that weren't assigned to any relation). - available_aliases = self.relation_aliases[:] - for relation in self.charm.model.relations[self.relation_name]: - alias = relation.data[self.charm.unit].get("alias") - if alias: - logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) - available_aliases.remove(alias) - - # Set the alias in the unit relation databag of the specific relation. 
- relation = self.charm.model.get_relation(self.relation_name, relation_id) - if relation: - relation.data[self.charm.unit].update({"alias": available_aliases[0]}) - - # We need to set relation alias also on the application level so, - # it will be accessible in show-unit juju command, executed for a consumer application unit - if relation and self.charm.unit.is_leader(): - relation.data[self.charm.app].update({"alias": available_aliases[0]}) - - def _emit_aliased_event( - self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel - ): - """Emit all aliased events.""" - alias = self._get_relation_alias(event.relation.id) - if alias: - getattr(self.on, f"{alias}_{event_name}").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - - def _get_relation_alias(self, relation_id: int) -> str | None: - """Gets the relation alias for a relation id.""" - for relation in self.charm.model.relations[self.relation_name]: - if relation.id == relation_id: - return relation.data[self.charm.unit].get("alias") - return None - - ############################################################################## - # Event Handlers - ############################################################################## - - def _on_secret_changed_event(self, event: SecretChangedEvent): - """Event notifying about a new value of a secret.""" - if not event.secret.label: - return - relation = self._relation_from_secret_label(event.secret.label) - short_uuid = self._short_uuid_from_secret_label(event.secret.label) - - if not relation: - logging.info( - f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" - ) - return - - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - - if relation.name != self.relation_name: - logging.info("Secret changed on wrong relation.") - return - - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = 
unit - break - - response_model = self.interface.build_model(relation.id) - if not short_uuid: - return - for _response in response_model.requests: - if _response.request_id == short_uuid: - response = _response - break - else: - logger.info(f"Unknown request id {short_uuid}") - return - - getattr(self.on, "authentication_updated").emit( - relation, - app=relation.app, - unit=remote_unit, - response=response, - ) - - def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: - """Event emitted when the database relation is created.""" - super()._on_relation_created_event(event) - - repository = OpsRelationRepository(self.model, event.relation, self.charm.app) - - # If relations aliases were provided, assign one to the relation. - self._assign_relation_alias(event.relation.id) - - if not self.charm.unit.is_leader(): - return - - # Generate all requests id so they are saved already. - for request in self._requests: - request.request_id = gen_hash(request.resource, request.salt) - - full_request = RequirerDataContractV1[self._request_model]( - version="v1", requests=self._requests - ) - write_model(repository, full_request) - - def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: - """Event emitted when the database relation has changed.""" - is_subordinate = False - remote_unit_data = None - for key in event.relation.data.keys(): - if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): - remote_unit_data = event.relation.data[key] - elif isinstance(key, Application) and key.name != self.charm.app.name: - is_subordinate = event.relation.data[key].get("subordinated") == "true" - - if is_subordinate: - if not remote_unit_data or remote_unit_data.get("state") != "ready": - return - - repository = self.interface.repository(event.relation.id, event.app) - response_model = self.interface.build_model(event.relation.id, component=event.app) - - if not response_model.requests: - logger.info("Still waiting for data.") - 
return - - data = repository.get_field("data") - if not data: - logger.info("Missing data to compute diffs") - return - - request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) - - for response in response_model.requests: - response_id = response.request_id or gen_hash(response.resource, response.salt) - request = request_map.get(response_id, None) - if not request: - raise ValueError( - f"No request matching the response with response_id {response_id}" - ) - self._handle_event(event, repository, request, response) - - ############################################################################## - # Methods to handle specificities of relation events - ############################################################################## - - @override - def _handle_event( - self, - event: RelationChangedEvent, - repository: OpsRelationRepository, - request: RequirerCommonModel, - response: ResourceProviderModel, - ): - _diff = self.compute_diff(event.relation, response, repository, store=True) - - for newval in _diff.added: - if secret_group := response._get_secret_field(newval): - uri = getattr(response, newval.replace("-", "_")) - repository.register_secret(uri, secret_group, response.request_id) - - if "secret-user" in _diff.added and not request.entity_type: - logger.info(f"resource {response.resource} created at {datetime.now()}") - getattr(self.on, "resource_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_created", response) - return - - if "secret-entity" in _diff.added and request.entity_type: - logger.info(f"entity {response.entity_name} created at {datetime.now()}") - getattr(self.on, "resource_entity_created").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "resource_entity_created", response) - return - - if "endpoints" in _diff.added or "endpoints" in _diff.changed: - logger.info(f"endpoints 
changed at {datetime.now()}") - getattr(self.on, "endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "endpoints_changed", response) - return - - if "read-only-endpoints" in _diff.added or "read-only-endpoints" in _diff.changed: - logger.info(f"read-only-endpoints changed at {datetime.now()}") - getattr(self.on, "read_only_endpoints_changed").emit( - event.relation, app=event.app, unit=event.unit, response=response - ) - self._emit_aliased_event(event, "read_only_endpoints_changed", response) - return diff --git a/tests/v1/integration/test_charm.py b/tests/v1/integration/test_charm.py index 9415f959..6936fc72 100644 --- a/tests/v1/integration/test_charm.py +++ b/tests/v1/integration/test_charm.py @@ -10,6 +10,7 @@ import psycopg2 import pytest import yaml +from more_itertools import one from pytest_operator.plugin import OpsTest from .helpers import ( @@ -835,30 +836,29 @@ async def test_an_application_can_request_multiple_databases(ops_test: OpsTest, assert first_database_connection_string != second_database_connection_string -async def test_external_node_connectivity_field(ops_test: OpsTest, application_charm): +async def test_external_node_connectivity_field(ops_test: OpsTest): # Check that the flag is missing if not requested requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "requests" + ops_test, APPLICATION_APP_NAME, DB_FIRST_DATABASE_RELATION_NAME, "data" ) or "[]" ) - request = requests[0] + request = one(requests.values()) assert request.get("external-node-connectivity") is None # Check that the second relation raises the flag requests = json.loads( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "data" ) - or "[]" + or "{}" ) - request = requests[0] - assert 
request.get("external-node-connectivity") == "true" + request = one(requests.values()) + assert request.get("external-node-connectivity") @pytest.mark.abort_on_fail -@pytest.mark.usefixtures("only_with_juju_secrets") async def test_relation_secret_revisions(ops_test: OpsTest): """Check that only a content change triggers the emission of a new revision.""" # Given @@ -875,7 +875,7 @@ async def test_relation_secret_revisions(ops_test: OpsTest): or "[]" ) request = requests[0] - request_id = request["request_id"] + request_id = request["request-id"] # When action = await ops_test.model.units.get(leader_name).run_action( @@ -904,7 +904,7 @@ async def test_relation_secret_revisions(ops_test: OpsTest): action = await ops_test.model.units.get(leader_name).run_action( "set-relation-field", **{ - "relation_id": pytest.second_database_relation.id, + "relation_id": rel_id, "field": "topsecret", "value": "changedvalue", }, @@ -920,94 +920,14 @@ async def test_relation_secret_revisions(ops_test: OpsTest): assert changed_secret_revision == unchanged_secret_revision -@pytest.mark.parametrize("field,value", [("new_field", "blah"), ("tls", "True")]) -@pytest.mark.usefixtures("only_without_juju_secrets") -async def test_provider_get_set_delete_fields(field, value, ops_test: OpsTest): - # Add normal field - leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) - leader_name = f"{DATABASE_APP_NAME}/{leader_id}" - - action = await ops_test.model.units.get(leader_name).run_action( - "set-relation-field", - **{ - "relation_id": pytest.second_database_relation.id, - "field": field, - "value": value, - }, - ) - await action.wait() - - requests = json.loads( - await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" - ) - or "[]" - ) - request = requests[0] - assert request.get(field) == value - - # Check all application units can read remote relation data - for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: 
- action = await unit.run_action( - "get-relation-field", - **{ - "relation_id": pytest.second_database_relation.id, - "field": field, - }, - ) - await action.wait() - assert action.results.get("value") == value - - # Check if database can retrieve self-side relation data - action = await ops_test.model.units.get(leader_name).run_action( - "get-relation-self-side-field", - **{ - "relation_id": pytest.second_database_relation.id, - "field": field, - "value": value, - }, - ) - await action.wait() - assert action.results.get("value") == value - - # Delete normal field - action = await ops_test.model.units.get(leader_name).run_action( - "delete-relation-field", - **{"relation_id": pytest.second_database_relation.id, "field": field}, - ) - await action.wait() - - requests = json.loads( - await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" - ) - or "[]" - ) - request = requests[0] - assert request.get(field) is None - - # Delete non-existent field - action = await ops_test.model.units.get(leader_name).run_action( - "delete-relation-field", - **{"relation_id": pytest.second_database_relation.id, "field": "doesnt_exist"}, - ) - await action.wait() - # Juju2 syntax - assert int(action.results["Code"]) == 0 - assert await check_logs( - ops_test, - strings=["Non-existing field 'doesnt_exist' was attempted to be removed from the databag"], - ) - - @pytest.mark.log_errors_allowed( "Non-existing field 'doesnt_exist' was attempted to be removed from the databag" ) @pytest.mark.parametrize( "field,value,relation_field", [ - ("new_field", "blah", "new_field"), - ("tls", "True", "secret-tls"), + ("new-field", "blah", "new-field"), + ("tls", "true", "secret-tls"), ], ) @pytest.mark.usefixtures("only_with_juju_secrets") @@ -1034,7 +954,7 @@ async def test_provider_get_set_delete_fields_secrets( or "[]" ) request = requests[0] - assert request.get(relation_field) + assert request.get(field) # Check all application units 
can read remote relation data for unit in ops_test.model.applications[APPLICATION_APP_NAME].units: diff --git a/tox.ini b/tox.ini index 27df41de..7f4ab7c9 100644 --- a/tox.ini +++ b/tox.ini @@ -105,6 +105,7 @@ deps = juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0} pytest-operator<0.43 pytest-mock + more_itertools websockets{env:WEBSOCKETS_VERSION_SPECIFIER:} -r {[vars]reqs_path}/v1/requirements.txt commands = From 8681993aba63e4b9a7c60ad2a5be11499317b19c Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Wed, 17 Sep 2025 15:12:30 +0200 Subject: [PATCH 05/34] fix: typo --- tests/v1/integration/database-charm/src/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index 9217cd70..c0370535 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -458,7 +458,7 @@ def _on_get_other_peer_relation_field(self, event: ActionEvent): event.fail("Missing relation") return for unit, interface in self.peer_units_data_interfaces.items(): - model = interface.build_model(relation.id, DataContract) + model = interface.build_model(relation.id) value[unit.name.replace("/", "-")] = getattr( model, event.params["field"].replace("-", "_") ) From df3313c847c027aceb019ac37490c28405bf5c48 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Wed, 17 Sep 2025 15:32:45 +0200 Subject: [PATCH 06/34] fix: typo --- tests/v1/integration/application-charm/src/charm.py | 1 + tests/v1/integration/test_charm.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/v1/integration/application-charm/src/charm.py b/tests/v1/integration/application-charm/src/charm.py index 3d122b8b..2e25ce44 100755 --- a/tests/v1/integration/application-charm/src/charm.py +++ b/tests/v1/integration/application-charm/src/charm.py @@ -44,6 +44,7 @@ class ExtendedResponseModel(ResourceProviderModel): topsecret: ExtraSecretStr = 
Field(default=None) donttellanyone: ExtraSecretStr = Field(default=None) + new_field: str | None = Field(default=None) new_field_req: str | None = Field(default=None) new_field2_req: str | None = Field(default=None) diff --git a/tests/v1/integration/test_charm.py b/tests/v1/integration/test_charm.py index 6936fc72..817b34c6 100644 --- a/tests/v1/integration/test_charm.py +++ b/tests/v1/integration/test_charm.py @@ -1023,7 +1023,7 @@ async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): """The 'tls' field, that was removed in the previous test has it's secret removed.""" # Add field field = "tls" - value = "True" + value = "true" leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) leader_name = f"{DATABASE_APP_NAME}/{leader_id}" action = await ops_test.model.units.get(leader_name).run_action( From b7e260d59ace7758c1df9b433a0f6796fcdf4a1f Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Wed, 17 Sep 2025 18:21:35 +0200 Subject: [PATCH 07/34] fix: correct tests --- .../data_platform_libs/v1/data_interfaces.py | 12 ++--- tests/v1/integration/test_charm.py | 53 ++++--------------- 2 files changed, 14 insertions(+), 51 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index c4f13a85..2404b6e7 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -800,7 +800,6 @@ def extract_secrets(self, info: ValidationInfo): @model_serializer(mode="wrap") def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): """Serializes the model writing the secrets in their respective secrets.""" - _encountered_secrets: set[tuple[CachedSecret, str]] = set() if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): logger.debug("No secret parsing serialization as we're lacking context here.") return handler(self) @@ -847,17 +846,14 @@ def 
serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ if value is None: full_content.pop(aliased_field, None) - _encountered_secrets.add((secret, secret_field)) else: full_content.update({aliased_field: actual_value}) secret.set_content(full_content) - # Delete all empty secrets and clean up their fields. - for secret, secret_field in _encountered_secrets: - if not secret.get_content(): - # Setting a field to '' deletes it - setattr(self, secret_field, "") - repository.delete_secret(secret.label) + if not full_content: + # Setting a field to '' deletes it + setattr(self, secret_field, None) + repository.delete_secret(secret.label) return handler(self) diff --git a/tests/v1/integration/test_charm.py b/tests/v1/integration/test_charm.py index 817b34c6..7a1ae653 100644 --- a/tests/v1/integration/test_charm.py +++ b/tests/v1/integration/test_charm.py @@ -924,19 +924,20 @@ async def test_relation_secret_revisions(ops_test: OpsTest): "Non-existing field 'doesnt_exist' was attempted to be removed from the databag" ) @pytest.mark.parametrize( - "field,value,relation_field", + "field,value,relation_field,output", [ - ("new-field", "blah", "new-field"), - ("tls", "true", "secret-tls"), + ("new-field", "blah", "new-field", "blah"), + ("tls", "true", "secret-tls", "True"), ], ) @pytest.mark.usefixtures("only_with_juju_secrets") async def test_provider_get_set_delete_fields_secrets( - field, value, relation_field, ops_test: OpsTest + field, value, relation_field, output, ops_test: OpsTest ): # Add field leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) leader_name = f"{DATABASE_APP_NAME}/{leader_id}" + action = await ops_test.model.units.get(leader_name).run_action( "set-relation-field", **{ @@ -954,7 +955,7 @@ async def test_provider_get_set_delete_fields_secrets( or "[]" ) request = requests[0] - assert request.get(field) + assert request.get(relation_field) # Check all application units can read remote relation data for unit in 
ops_test.model.applications[APPLICATION_APP_NAME].units: @@ -966,7 +967,7 @@ async def test_provider_get_set_delete_fields_secrets( }, ) await action.wait() - assert action.results.get("value") == value + assert action.results.get("value") == output # Check if database can retrieve self-side relation data action = await ops_test.model.units.get(leader_name).run_action( @@ -978,7 +979,7 @@ async def test_provider_get_set_delete_fields_secrets( }, ) await action.wait() - assert action.results.get("value") == value + assert action.results.get("value") == output # Delete field action = await ops_test.model.units.get(leader_name).run_action( @@ -1017,7 +1018,6 @@ async def test_provider_get_set_delete_fields_secrets( @pytest.mark.abort_on_fail -@pytest.mark.log_errors_allowed("Can't delete secret for relation") @pytest.mark.usefixtures("only_with_juju_secrets") async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): """The 'tls' field, that was removed in the previous test has it's secret removed.""" @@ -1050,23 +1050,12 @@ async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): **{"relation_id": pytest.second_database_relation.id, "field": field}, ) await action.wait() - assert not ( - await check_logs( - ops_test, - strings=["Non-existing field 'tls' was attempted to be removed from the databag"], - ) - ) - assert not (await check_logs(ops_test, strings=["Can't delete secret for relation"])) action = await ops_test.model.units.get(leader_name).run_action( "delete-relation-field", **{"relation_id": pytest.second_database_relation.id, "field": field}, ) await action.wait() - assert await check_logs( - ops_test, strings=["Non-existing field 'tls' was attempted to be removed from the databag"] - ) - assert await check_logs(ops_test, strings=["Can't delete secret for relation"]) assert ( await get_application_relation_data( @@ -1157,10 +1146,10 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): @pytest.mark.log_errors_allowed( 
- "This operation (update_relation_data()) can only be performed by the leader unit" + "This operation (write_field()) can only be performed by the leader unit" ) @pytest.mark.log_errors_allowed( - "This operation (delete_relation_data()) can only be performed by the leader unit" + "This operation (delete_field()) can only be performed by the leader unit" ) async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) @@ -1186,10 +1175,6 @@ async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): }, ) await action.wait() - assert await check_logs( - ops_test, - strings=["This operation (write_field) can only be performed by the leader unit"], - ) requests = json.loads( await get_application_relation_data( @@ -1208,10 +1193,6 @@ async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): **{"relation_id": pytest.second_database_relation.id, "field": "new_field"}, ) await action.wait() - assert await check_logs( - ops_test, - strings=["This operation (delete_field) can only be performed by the leader unit"], - ) requests = json.loads( await get_application_relation_data( @@ -1274,12 +1255,6 @@ async def test_requires_set_delete_fields(ops_test: OpsTest): assert request.get("new_field_req") is None -@pytest.mark.log_errors_allowed( - "This operation (update_relation_data()) can only be performed by the leader unit" -) -@pytest.mark.log_errors_allowed( - "This operation (delete_relation_data()) can only be performed by the leader unit" -) async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) leader_name = f"{APPLICATION_APP_NAME}/{leader_id}" @@ -1304,10 +1279,6 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): }, ) await action.wait() - assert await check_logs( - ops_test, - strings=["This operation (write_field) can only be performed by the leader unit"], 
- ) requests = json.loads( await get_application_relation_data( @@ -1327,10 +1298,6 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): **{"relation_id": pytest.second_database_relation.id, "field": "new_field-req"}, ) await action.wait() - assert await check_logs( - ops_test, - strings=["This operation (write_field()) can only be performed by the leader unit"], - ) assert ( await get_application_relation_data( From 8c7382036485e33a226f65b0ccb78bb4e19f3bd8 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Thu, 18 Sep 2025 16:29:49 +0200 Subject: [PATCH 08/34] fix: final round of fixes --- .../data_platform_libs/v1/data_interfaces.py | 13 +++- .../application-charm/actions.yaml | 6 +- .../application-charm/src/charm.py | 28 +++++-- .../integration/database-charm/src/charm.py | 4 +- tests/v1/integration/helpers.py | 2 +- tests/v1/integration/test_charm.py | 77 ++++++++----------- 6 files changed, 74 insertions(+), 56 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 2404b6e7..e4265fc3 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -368,12 +368,15 @@ def ensure_leader_for_app(f): """Decorator to ensure that only leader can perform given operation.""" def wrapper(self, *args, **kwargs): - if self.component == self._local_app and not self._local_unit.is_leader(): + if ( + self.leader_only + and self.component == self._local_app + and not self._local_unit.is_leader() + ): logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") return return f(self, *args, **kwargs) - wrapper.leader_only = True return wrapper @@ -1146,6 +1149,7 @@ def __init__( self.component = component self.model = model self.secrets = SecretCache(model, component) + self.leader_only = True @abstractmethod def _generate_secret_label( @@ -1155,6 +1159,7 @@ def _generate_secret_label( 
... @override + @ensure_leader_for_app def get_data(self) -> dict[str, Any] | None: ret: dict[str, Any] = {} if not self.relation: @@ -1465,6 +1470,10 @@ class OpsPeerRepository(OpsRepository): uri_to_databag: bool = False + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.leader_only = False + @property def scope(self) -> Scope: """Returns a scope.""" diff --git a/tests/v1/integration/application-charm/actions.yaml b/tests/v1/integration/application-charm/actions.yaml index fbba614f..b64f6b9f 100644 --- a/tests/v1/integration/application-charm/actions.yaml +++ b/tests/v1/integration/application-charm/actions.yaml @@ -34,6 +34,10 @@ set-relation-field: value: type: string description: Value of the field to set + raw: + type: bool + description: Direct access + default: false delete-relation-field: description: Delete fields from the sedond-database relation @@ -43,4 +47,4 @@ delete-relation-field: set-mtls-cert: description: Sets the MTLS cert for the requirer application. 
- \ No newline at end of file + diff --git a/tests/v1/integration/application-charm/src/charm.py b/tests/v1/integration/application-charm/src/charm.py index 2e25ce44..62b9662b 100755 --- a/tests/v1/integration/application-charm/src/charm.py +++ b/tests/v1/integration/application-charm/src/charm.py @@ -49,6 +49,10 @@ class ExtendedResponseModel(ResourceProviderModel): new_field2_req: str | None = Field(default=None) +class ExtendedRequirerCommonModel(RequirerCommonModel): + new_field: str | None = Field(default=None) + + class ApplicationCharm(CharmBase): """Application charm that connects to database charms.""" @@ -100,7 +104,7 @@ def __init__(self, *args): charm=self, relation_name="second-database-db", requests=[ - RequirerCommonModel( + ExtendedRequirerCommonModel( resource=database_name, extra_user_roles=EXTRA_USER_ROLES, external_node_connectivity=True, @@ -337,20 +341,34 @@ def _on_get_relation_field(self, event: ActionEvent): def _on_get_relation_self_side_field(self, event: ActionEvent): """Get requested relation field (OTHER side).""" source, relation = self._get_relation(event.params["relation_id"]) - value = source.interface.repository(relation.id).get_field(event.params["field"]) + value = None + model = source.interface.build_model( + relation.id, model=RequirerDataContractV1[ExtendedRequirerCommonModel] + ) + for request in model.requests: + value = getattr(request, event.params["field"].replace("-", "_")) + value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_set_relation_field(self, event: ActionEvent): """Set requested relation field on self-side (that's the only one writeable).""" source, relation = self._get_relation(event.params["relation_id"]) - source.interface.repository(relation.id).write_field( - event.params["field"], event.params["value"] + model = source.interface.build_model( + relation.id, 
model=RequirerDataContractV1[ExtendedRequirerCommonModel] ) + for request in model.requests: + setattr(request, event.params["field"].replace("-", "_"), event.params["value"]) + source.interface.write_model(relation.id, model) def _on_delete_relation_field(self, event: ActionEvent): """Delete requested relation field on self-side (that's the only one writeable).""" source, relation = self._get_relation(event.params["relation_id"]) - source.interface.repository(relation.id).delete_field(event.params["field"]) + model = source.interface.build_model( + relation.id, model=RequirerDataContractV1[ExtendedRequirerCommonModel] + ) + for request in model.requests: + setattr(request, event.params["field"].replace("-", "_"), None) + source.interface.write_model(relation.id, model) # First database events observers. def _on_first_database_created(self, event: ResourceCreatedEvent) -> None: diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index c0370535..66ec1f56 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -318,7 +318,9 @@ def _on_get_relation_field(self, event: ActionEvent): """[second_database]: Get requested relation field.""" relation = self._get_relation(event.params["relation_id"]) value = None - model = self.database.interface.build_model(relation.id, DataContract) + model = self.database.interface.build_model( + relation.id, DataContract, component=relation.app + ) for request in model.requests: value = getattr(request, event.params["field"].replace("-", "_")) value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value diff --git a/tests/v1/integration/helpers.py b/tests/v1/integration/helpers.py index e2434877..084e36b3 100644 --- a/tests/v1/integration/helpers.py +++ b/tests/v1/integration/helpers.py @@ -269,7 +269,7 @@ async def get_application_relation_data( return 
relation_data[0]["local-unit"].get("data", {}).get(key) -async def check_logs(ops_test: OpsTest, strings: str, limit: int = 10) -> bool: +async def check_logs(ops_test: OpsTest, strings: list[str], limit: int = 10) -> bool: """Check if any of strings may appear in juju debug-log.""" # juju debug-log may not be flushed yet, thus the "tenacity simulation" for tries in range(5): diff --git a/tests/v1/integration/test_charm.py b/tests/v1/integration/test_charm.py index 7a1ae653..3e57067c 100644 --- a/tests/v1/integration/test_charm.py +++ b/tests/v1/integration/test_charm.py @@ -15,7 +15,6 @@ from .helpers import ( build_connection_string, - check_logs, get_application_relation_data, get_juju_secret, get_leader_id, @@ -1018,7 +1017,6 @@ async def test_provider_get_set_delete_fields_secrets( @pytest.mark.abort_on_fail -@pytest.mark.usefixtures("only_with_juju_secrets") async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): """The 'tls' field, that was removed in the previous test has it's secret removed.""" # Add field @@ -1037,12 +1035,14 @@ async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): await action.wait() # Get TLS secret pointer - secret_uri = await get_application_relation_data( - ops_test, - APPLICATION_APP_NAME, - DB_SECOND_DATABASE_RELATION_NAME, - f"{SECRET_REF_PREFIX}{field}", + requests = json.loads( + await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" + ) + or "[]" ) + request = requests[0] + secret_uri = request.get(f"{SECRET_REF_PREFIX}{field}") # Delete field action = await ops_test.model.units.get(leader_name).run_action( @@ -1051,22 +1051,14 @@ async def test_provider_deleted_secret_is_removed(ops_test: OpsTest): ) await action.wait() - action = await ops_test.model.units.get(leader_name).run_action( - "delete-relation-field", - **{"relation_id": pytest.second_database_relation.id, "field": field}, - ) - await action.wait() - - assert ( + requests = 
json.loads( await get_application_relation_data( - ops_test, - APPLICATION_APP_NAME, - DB_SECOND_DATABASE_RELATION_NAME, - f"{SECRET_REF_PREFIX}{field}", + ops_test, APPLICATION_APP_NAME, DB_SECOND_DATABASE_RELATION_NAME, "requests" ) - is None + or "[]" ) - + request = requests[0] + assert request.get(f"{SECRET_REF_PREFIX}{field}") is None secrets = await list_juju_secrets(ops_test) secret_xid = secret_uri.split("/")[-1] assert secret_xid not in secrets @@ -1090,15 +1082,15 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): requests = json.loads( await get_application_relation_data( ops_test, - APPLICATION_APP_NAME, - DB_SECOND_DATABASE_RELATION_NAME, + DATABASE_APP_NAME, + DATABASE_APP_NAME, "requests", related_endpoint="second-database-db", ) or "[]" ) request = requests[0] - assert request.get("new_field") == "blah" + assert request.get("new-field") == "blah" # Check all application units can read remote relation data for unit in ops_test.model.applications[DATABASE_APP_NAME].units: @@ -1134,8 +1126,8 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): requests = json.loads( await get_application_relation_data( ops_test, - APPLICATION_APP_NAME, - DB_SECOND_DATABASE_RELATION_NAME, + DATABASE_APP_NAME, + DATABASE_APP_NAME, "requests", related_endpoint="second-database-db", ) @@ -1146,10 +1138,7 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): @pytest.mark.log_errors_allowed( - "This operation (write_field()) can only be performed by the leader unit" -) -@pytest.mark.log_errors_allowed( - "This operation (delete_field()) can only be performed by the leader unit" + "This operation (get_data) can only be performed by the leader unit" ) async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) @@ -1204,9 +1193,10 @@ async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): or "[]" ) request = requests[0] - assert 
request.get("new_field") == "blah" + assert request.get("new-field") == "blah" +@pytest.mark.abort_on_fail async def test_requires_set_delete_fields(ops_test: OpsTest): # Add field leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) @@ -1255,6 +1245,7 @@ async def test_requires_set_delete_fields(ops_test: OpsTest): assert request.get("new_field_req") is None +@pytest.mark.abort_on_fail async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) leader_name = f"{APPLICATION_APP_NAME}/{leader_id}" @@ -1262,7 +1253,7 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): "set-relation-field", **{ "relation_id": pytest.second_database_relation.id, - "field": "new_field-req", + "field": "new_field_req", "value": "blah-req", }, ) @@ -1274,7 +1265,7 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): "set-relation-field", **{ "relation_id": pytest.second_database_relation.id, - "field": "new_field2-req", + "field": "new_field2_req", "value": "blah2-req", }, ) @@ -1291,24 +1282,14 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): or "[]" ) request = requests[0] - assert request.get("new-field2-req") is None + assert request.get("new_field2_req") is None action = await ops_test.model.units.get(unit_name).run_action( "delete-relation-field", - **{"relation_id": pytest.second_database_relation.id, "field": "new_field-req"}, + **{"relation_id": pytest.second_database_relation.id, "field": "new_field_req"}, ) await action.wait() - assert ( - await get_application_relation_data( - ops_test, - DATABASE_APP_NAME, - DATABASE_APP_NAME, - "new_field-req", - related_endpoint=DB_SECOND_DATABASE_RELATION_NAME, - ) - == "blah-req" - ) requests = json.loads( await get_application_relation_data( ops_test, @@ -1320,11 +1301,15 @@ async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): or "[]" ) request = 
requests[0] - assert request.get("new-field-req") == "blah-req" + assert request.get("new_field_req") == "blah-req" -async def test_scaling_requires_can_access_shared_secrest(ops_test): +@pytest.mark.abort_on_fail +async def test_scaling_requires_can_access_shared_secrets(ops_test): """When scaling up the application, new units should have access to relation secrets.""" + for relation in ops_test.model.relations: + if relation.id == 10: + pytest.second_database_relation = relation await ops_test.model.applications[APPLICATION_APP_NAME].scale(3) await ops_test.model.wait_for_idle( From 1e460b866767fc2f04614d97a896b7f1e6274751 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 09:30:30 +0200 Subject: [PATCH 09/34] fix: linting --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 7f4ab7c9..dbf20430 100644 --- a/tox.ini +++ b/tox.ini @@ -44,6 +44,7 @@ deps = pyright ops psycopg + psycopg2-binary lint-v0: -r {[vars]reqs_path}/v0/requirements.txt lint-v1: -r {[vars]reqs_path}/v1/requirements.txt commands = From 0eb36e85775e09536431eab3eb401f40acde8a39 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 11:27:56 +0200 Subject: [PATCH 10/34] fix: small changes --- .../integration/backward-compatibility-charm/src/charm.py | 7 +++++++ tests/v1/integration/kafka-charm/src/charm.py | 2 +- tests/v1/integration/kafka-connect-charm/src/charm.py | 2 +- tests/v1/integration/opensearch-charm/src/charm.py | 2 +- tests/v1/integration/test_kafka_connect_charm.py | 4 ++++ 5 files changed, 14 insertions(+), 3 deletions(-) diff --git a/tests/v1/integration/backward-compatibility-charm/src/charm.py b/tests/v1/integration/backward-compatibility-charm/src/charm.py index 84b5fd52..ed0917c0 100755 --- a/tests/v1/integration/backward-compatibility-charm/src/charm.py +++ b/tests/v1/integration/backward-compatibility-charm/src/charm.py @@ -28,10 +28,17 @@ class ClientCharm(CharmBase): def __init__(self, *args): super().__init__(*args) + 
# Default charm events. + self.framework.observe(self.on.start, self._on_start) + # Charm events defined in the database provides charm library. self.database = DatabaseRequires(self, "backward-database", "bwclient") self.framework.observe(self.database.on.database_created, self._on_resource_created) + def _on_start(self, _) -> None: + """Only sets an active status.""" + self.unit.status = ActiveStatus("Backward compatibility charm ready!") + def _on_resource_created(self, event: DatabaseCreatedEvent) -> None: """Event triggered when a new database is requested.""" relation_id = event.relation.id diff --git a/tests/v1/integration/kafka-charm/src/charm.py b/tests/v1/integration/kafka-charm/src/charm.py index 77fadf95..55c110fb 100755 --- a/tests/v1/integration/kafka-charm/src/charm.py +++ b/tests/v1/integration/kafka-charm/src/charm.py @@ -77,7 +77,7 @@ def app_peer_data(self) -> dict: if not relation: return {} - return dict(relation.data[self.app]) + return relation.data[self.app] def get_secret(self, scope: str, key: str) -> str | None: """Get secret from the secret storage.""" diff --git a/tests/v1/integration/kafka-connect-charm/src/charm.py b/tests/v1/integration/kafka-connect-charm/src/charm.py index d53b29d9..94eb8c24 100755 --- a/tests/v1/integration/kafka-connect-charm/src/charm.py +++ b/tests/v1/integration/kafka-connect-charm/src/charm.py @@ -59,7 +59,7 @@ def app_peer_data(self) -> dict: if not relation: return {} - return dict(relation.data[self.app]) + return relation.data[self.app] def get_secret(self, scope: str, key: str) -> str: """Get secret from the secret storage.""" diff --git a/tests/v1/integration/opensearch-charm/src/charm.py b/tests/v1/integration/opensearch-charm/src/charm.py index b52b20da..43b0ea9b 100755 --- a/tests/v1/integration/opensearch-charm/src/charm.py +++ b/tests/v1/integration/opensearch-charm/src/charm.py @@ -66,7 +66,7 @@ def app_peer_data(self) -> dict: if not relation: return {} - return dict(relation.data[self.app]) + 
return relation.data[self.app] def get_secret(self, scope: str, key: str) -> str | None: """Get secret from the secret storage.""" diff --git a/tests/v1/integration/test_kafka_connect_charm.py b/tests/v1/integration/test_kafka_connect_charm.py index e39a7e6e..f8babf88 100644 --- a/tests/v1/integration/test_kafka_connect_charm.py +++ b/tests/v1/integration/test_kafka_connect_charm.py @@ -93,6 +93,10 @@ async def test_kafka_connect_credentials_change(ops_test: OpsTest, request: pyte password = request.config.cache.get("initial_password", "") assert password == "password" + import pdb + + pdb.set_trace() + # Change connect password action = ( await ops_test.model.applications[PROVIDER_APP_NAME] From 2c2417f2feec36314ffb7b22652b014dbdf9169c Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 11:36:21 +0200 Subject: [PATCH 11/34] fix: kafka connect green --- tests/v1/integration/test_kafka_connect_charm.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/tests/v1/integration/test_kafka_connect_charm.py b/tests/v1/integration/test_kafka_connect_charm.py index f8babf88..d4c78f5a 100644 --- a/tests/v1/integration/test_kafka_connect_charm.py +++ b/tests/v1/integration/test_kafka_connect_charm.py @@ -93,10 +93,6 @@ async def test_kafka_connect_credentials_change(ops_test: OpsTest, request: pyte password = request.config.cache.get("initial_password", "") assert password == "password" - import pdb - - pdb.set_trace() - # Change connect password action = ( await ops_test.model.applications[PROVIDER_APP_NAME] @@ -112,13 +108,6 @@ async def test_kafka_connect_credentials_change(ops_test: OpsTest, request: pyte status="active", ) - secret_uri = ( - await get_application_relation_data( - ops_test, REQUIRER_APP_NAME, SOURCE_REL, f"{PROV_SECRET_PREFIX}user" - ) - or "" - ) - requests = json.loads( await get_application_relation_data(ops_test, REQUIRER_APP_NAME, SOURCE_REL, "requests") or "[]" From 3652f55264c586d3644015ea9cad20354b312bce Mon Sep 17 00:00:00 
2001 From: Neha Oudin Date: Fri, 19 Sep 2025 12:12:15 +0200 Subject: [PATCH 12/34] ensure_leader_for_app --- .../data_platform_libs/v1/data_interfaces.py | 14 +++----------- tests/v1/integration/database-charm/src/charm.py | 1 + tests/v1/integration/test_charm.py | 9 +++++++++ 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index e4265fc3..aa8e51b2 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -368,11 +368,7 @@ def ensure_leader_for_app(f): """Decorator to ensure that only leader can perform given operation.""" def wrapper(self, *args, **kwargs): - if ( - self.leader_only - and self.component == self._local_app - and not self._local_unit.is_leader() - ): + if self.component == self._local_app and not self._local_unit.is_leader(): logger.error(f"This operation ({f.__name__}) can only be performed by the leader unit") return return f(self, *args, **kwargs) @@ -1149,7 +1145,6 @@ def __init__( self.component = component self.model = model self.secrets = SecretCache(model, component) - self.leader_only = True @abstractmethod def _generate_secret_label( @@ -1159,7 +1154,6 @@ def _generate_secret_label( ... 
@override - @ensure_leader_for_app def get_data(self) -> dict[str, Any] | None: ret: dict[str, Any] = {} if not self.relation: @@ -1455,6 +1449,8 @@ def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> s """Generates the field name to store in the peer relation.""" return f"{self.SECRET_FIELD_NAME}-{secret_group}" + get_data = ensure_leader_for_app(OpsRepository.get_data) + class OpsPeerRepository(OpsRepository): """Implementation of the Ops Repository for peer relations.""" @@ -1470,10 +1466,6 @@ class OpsPeerRepository(OpsRepository): uri_to_databag: bool = False - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.leader_only = False - @property def scope(self) -> Scope: """Returns a scope.""" diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index 66ec1f56..b34872a8 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -63,6 +63,7 @@ class PeerUnitModel(PeerModel): secret_field: ExtraSecretStr my_unit_secret: ExtraSecretStr not_a_secret: str | None = Field(default=None) + non_secret_field: str | None = Field(default=None) class ExtendedResourceProviderModel(ResourceProviderModel): diff --git a/tests/v1/integration/test_charm.py b/tests/v1/integration/test_charm.py index 3e57067c..d32a97dd 100644 --- a/tests/v1/integration/test_charm.py +++ b/tests/v1/integration/test_charm.py @@ -1140,6 +1140,9 @@ async def test_requires_get_set_delete_fields(ops_test: OpsTest): @pytest.mark.log_errors_allowed( "This operation (get_data) can only be performed by the leader unit" ) +@pytest.mark.log_errors_allowed( + "This operation (write_field) can only be performed by the leader unit" +) async def test_provider_set_delete_fields_leader_only(ops_test: OpsTest): leader_id = await get_leader_id(ops_test, DATABASE_APP_NAME) leader_name = f"{DATABASE_APP_NAME}/{leader_id}" @@ -1246,6 +1249,12 @@ 
async def test_requires_set_delete_fields(ops_test: OpsTest): @pytest.mark.abort_on_fail +@pytest.mark.log_errors_allowed( + "This operation (get_data) can only be performed by the leader unit" +) +@pytest.mark.log_errors_allowed( + "This operation (write_field) can only be performed by the leader unit" +) async def test_requires_set_delete_fields_leader_only(ops_test: OpsTest): leader_id = await get_leader_id(ops_test, APPLICATION_APP_NAME) leader_name = f"{APPLICATION_APP_NAME}/{leader_id}" From 84284946392400732c22ea66f29f06fbcc5a3806 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 13:54:59 +0200 Subject: [PATCH 13/34] fix: default factory --- lib/charms/data_platform_libs/v1/data_interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index aa8e51b2..3b4dc3b9 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -969,7 +969,7 @@ class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): """The new Data Contract.""" version: Literal["v1"] = Field(default="v1") - requests: list[TRequirerCommonModel] + requests: list[TRequirerCommonModel] = Field(default_factory=list) def discriminate_on_version(payload: Any) -> str: From e5ac83549e9f4ef9cffb5f370e921eaa67d6fea0 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 13:58:30 +0200 Subject: [PATCH 14/34] fix: ensure delay --- tests/v1/integration/test_opensearch_charm.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/v1/integration/test_opensearch_charm.py b/tests/v1/integration/test_opensearch_charm.py index c13a27e4..99486477 100644 --- a/tests/v1/integration/test_opensearch_charm.py +++ b/tests/v1/integration/test_opensearch_charm.py @@ -100,6 +100,8 @@ async def test_opensearch_relation_secret_changed(ops_test: OpsTest): action = await 
ops_test.model.units.get(unit_name).run_action("change-admin-password") await action.wait() + await ops_test.model.wait_for_idle(apps=[APPLICATION_APP_NAME], status="active") + secret_content = await get_juju_secret(ops_test, secret_uri) new_password = secret_content["password"] assert password != new_password From b8aea7e0155988388ad03a2adbedb17f8bfaaefd Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 14:18:48 +0200 Subject: [PATCH 15/34] fix: add v0 to backward compatibility charm --- .../lib/charms/data_platform_libs/v0/.gitkeep | 0 tests/v1/integration/conftest.py | 8 ++++++++ 2 files changed, 8 insertions(+) create mode 100644 tests/v1/integration/backward-compatibility-charm/lib/charms/data_platform_libs/v0/.gitkeep diff --git a/tests/v1/integration/backward-compatibility-charm/lib/charms/data_platform_libs/v0/.gitkeep b/tests/v1/integration/backward-compatibility-charm/lib/charms/data_platform_libs/v0/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/tests/v1/integration/conftest.py b/tests/v1/integration/conftest.py index f4f584b8..3d94017b 100644 --- a/tests/v1/integration/conftest.py +++ b/tests/v1/integration/conftest.py @@ -44,6 +44,14 @@ async def build_charm(charm_path, bases_index: int = None) -> Path: return ops_test +@pytest.fixture(scope="module", autouse=True) +def copy_v0_data_interfaces_library_into_charm(ops_test: OpsTest): + """Copy the data_interfaces library to the different charm folder.""" + library_path = "lib/charms/data_platform_libs/v0/data_interfaces.py" + install_path = "tests/v1/integration/backward-compatibility-charm/" + library_path + shutil.copyfile(library_path, install_path) + + @pytest.fixture(scope="module", autouse=True) def copy_data_interfaces_library_into_charm(ops_test: OpsTest): """Copy the data_interfaces library to the different charm folder.""" From a87639240c1c97e70974e623ba3acf861175b0a9 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 14:25:57 +0200 Subject: 
[PATCH 16/34] fix: backward compatibility test --- .../test_backward_compatibility_charm.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/tests/v1/integration/test_backward_compatibility_charm.py b/tests/v1/integration/test_backward_compatibility_charm.py index 682102e1..814e8268 100644 --- a/tests/v1/integration/test_backward_compatibility_charm.py +++ b/tests/v1/integration/test_backward_compatibility_charm.py @@ -70,19 +70,21 @@ async def test_backward_relation_with_charm_libraries_secrets(ops_test: OpsTest) assert unit.workload_status_message == "backward_database_created" # Get the requests - requests = json.loads( + secret_uri = ( await get_application_relation_data( - ops_test, APPLICATION_APP_NAME, RELATION_NAME, "requests" + ops_test, APPLICATION_APP_NAME, RELATION_NAME, f"{PROV_SECRET_PREFIX}user" ) - or "[]" + or "" ) - request = requests[0] - secret_uri = request[f"{PROV_SECRET_PREFIX}user"] secret_data = await get_juju_secret(ops_test, secret_uri) username = secret_data["username"] password = secret_data["password"] - endpoints = request["endpoints"] - database = request["resource"] + endpoints = await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, RELATION_NAME, "endpoints" + ) + database = await get_application_relation_data( + ops_test, APPLICATION_APP_NAME, RELATION_NAME, "database" + ) assert username == f"relation_{rel.id}_None" assert len(password) == 16 From ae759a4b7e5f63e3c49d6d6a2eab9f2ff75caeb8 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 15:04:45 +0200 Subject: [PATCH 17/34] fix: linting + tox config --- lib/charms/data_platform_libs/v1/data_interfaces.py | 4 ++-- .../test_backward_compatibility_charm.py | 1 - tox.ini | 13 +++++++++---- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 3b4dc3b9..02aabe66 100644 --- 
a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -1959,13 +1959,13 @@ class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read/Write enpoints are changed.""" + """Read/Write endpoints are changed.""" pass class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): - """Read-only enpoints are changed.""" + """Read-only endpoints are changed.""" pass diff --git a/tests/v1/integration/test_backward_compatibility_charm.py b/tests/v1/integration/test_backward_compatibility_charm.py index 814e8268..bde843f7 100644 --- a/tests/v1/integration/test_backward_compatibility_charm.py +++ b/tests/v1/integration/test_backward_compatibility_charm.py @@ -2,7 +2,6 @@ # Copyright 2022 Canonical Ltd. # See LICENSE file for licensing details. import asyncio -import json from pathlib import Path import pytest diff --git a/tox.ini b/tox.ini index dbf20430..8e775ded 100644 --- a/tox.ini +++ b/tox.ini @@ -12,6 +12,9 @@ tests_path = {tox_root}/tests reqs_path = {tox_root}/requirements lib_path = {tox_root}/lib/charms/data_platform_libs all_path = {[vars]src_path} {[vars]tests_path} {[vars]lib_path} +all_path_v0 = {[vars]src_path} {[vars]lib_path}/v0 {[vars]tests_path}/v0 +all_path_v1 = {[vars]src_path} {[vars]lib_path}/v1 {[vars]tests_path}/v1 + [testenv] set_env = @@ -53,7 +56,7 @@ commands = --skip {tox_root}/.tox \ --skip {tox_root}/build \ --skip {tox_root}/lib \ - --skip {tox_root}/tests/integration/*/lib \ + --skip {tox_root}/tests/*/integration/*/lib \ --skip {tox_root}/venv \ --skip {tox_root}/.mypy_cache \ --skip {tox_root}/icon.svg \ @@ -62,10 +65,12 @@ commands = --ignore-words-list "assertIn" lint-v0: codespell {[vars]lib_path}/v0 - lint-v1: codespell {[vars]lib_path}/v0 + lint-v1: codespell {[vars]lib_path}/v1 - ruff check {[vars]all_path} - black --check --diff 
{[vars]all_path} + lint-v0: ruff check {[vars]all_path_v0} + lint-v1: ruff check {[vars]all_path_v1} + lint-v0: black --check --diff {[vars]all_path_v0} + lint-v1: black --check --diff {[vars]all_path_v1} lint-v0: pyright {[vars]src_path} {[vars]lib_path}/v0 lint-v1: pyright {[vars]src_path} {[vars]lib_path}/v1 From 411ce1c69932f8e5700a86d276a9497d8e66e6c5 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Fri, 19 Sep 2025 17:02:45 +0200 Subject: [PATCH 18/34] fix: lower log level --- tests/v1/integration/backward-compatibility-charm/src/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/v1/integration/backward-compatibility-charm/src/charm.py b/tests/v1/integration/backward-compatibility-charm/src/charm.py index ed0917c0..e0f025c2 100755 --- a/tests/v1/integration/backward-compatibility-charm/src/charm.py +++ b/tests/v1/integration/backward-compatibility-charm/src/charm.py @@ -46,7 +46,7 @@ def _on_resource_created(self, event: DatabaseCreatedEvent) -> None: password = event.password database = event.database - logger.error( + logger.info( f"Database {database} created for relation {relation_id} with user {username} and password {password}" ) self.unit.status = ActiveStatus("backward_database_created") From 76867a2005d2d26f44397e839f47b6460c9632be Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Mon, 6 Oct 2025 14:49:08 +0200 Subject: [PATCH 19/34] fix: reviews from Sinclert' --- .../data_platform_libs/v1/data_interfaces.py | 60 ++++++++++++------- tox.ini | 2 - 2 files changed, 38 insertions(+), 24 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 02aabe66..01a73561 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -92,7 +92,7 @@ def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: # Event triggered when a new entity is created. ... 
-Compared to V1, this library makes heavy use of pydantic models, and allows for +Compared to V0, this library makes heavy use of pydantic models, and allows for multiple requests, specified as a list. On the Requirer side, each response will trigger one custom event for that response. This way, it allows for more strategic events to be emitted according to the request. @@ -194,29 +194,41 @@ def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: database charm code: ```python -from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides +from charms.data_platform_libs.v1.data_interfaces import ( + ResourceProviderEventHandler, + ResourceProviderModel, + ResourceRequestedEvent, + RequirerCommonModel, +) class SampleCharm(CharmBase): def __init__(self, *args): super().__init__(*args) # Charm events defined in the database provides charm library. - self.provided_database = DatabaseProvides(self, relation_name="database") - self.framework.observe(self.provided_database.on.database_requested, - self._on_database_requested) + self.provided_database = ResourceProviderEventHandler(self, "database", RequirerCommonModel) + self.framework.observe(self.provided_database.on.resource_requested, + self._on_resource_requested) # Database generic helper self.database = DatabaseHelper() - def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: + def _on_database_requested(self, event: ResourceRequestedEvent) -> None: # Handle the event triggered by a new database requested in the relation # Retrieve the database name using the charm library. 
- db_name = event.database + db_name = event.request.resource # generate a new user credential - username = self.database.generate_user() - password = self.database.generate_password() + username = self.database.generate_user(event.request.request_id) + password = self.database.generate_password(event.request.request_id) # set the credentials for the relation - self.provided_database.set_credentials(event.relation.id, username, password) - # set other variables for the relation event.set_tls("False") + response = ResourceProviderModel( + salt=event.request.salt, + request_id=event.request.request_id, + resource=db_name, + username=SecretStr(username), + password=SecretStr(password), + ... + ) + self.provided_database.set_response(event.relation.id, response) ``` As shown above, the library provides a custom event (database_requested) to handle @@ -1449,7 +1461,10 @@ def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> s """Generates the field name to store in the peer relation.""" return f"{self.SECRET_FIELD_NAME}-{secret_group}" - get_data = ensure_leader_for_app(OpsRepository.get_data) + @ensure_leader_for_app + @override + def get_data(self) -> dict[str, Any] | None: + return super().get_data() class OpsPeerRepository(OpsRepository): @@ -2029,6 +2044,15 @@ def relations(self) -> list[Relation]: """Shortcut to get access to the relations.""" return self.interface.relations + def get_remote_unit(self, relation: Relation) -> Unit | None: + """Gets the remote unit in the relation.""" + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + return remote_unit + # Event handlers def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: @@ -2272,11 +2296,7 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: logging.info("Secret changed on wrong relation.") return - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - 
remote_unit = unit - break + remote_unit = self.get_remote_unit(relation) repository = OpsRelationRepository(self.model, relation, component=relation.app) version = repository.get_field("version") or "v0" @@ -2616,11 +2636,7 @@ def _on_secret_changed_event(self, event: SecretChangedEvent): logging.info("Secret changed on wrong relation.") return - remote_unit = None - for unit in relation.units: - if unit.app != self.charm.app: - remote_unit = unit - break + remote_unit = self.get_remote_unit(relation) response_model = self.interface.build_model(relation.id, component=relation.app) if not short_uuid: diff --git a/tox.ini b/tox.ini index 8e775ded..619eb5e9 100644 --- a/tox.ini +++ b/tox.ini @@ -161,7 +161,6 @@ commands = [testenv:integration-kafka-v1] description = Run Kafka integration tests deps = - psycopg2-binary pytest<8.2.0 juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0} pytest-operator<0.43 @@ -201,7 +200,6 @@ commands = [testenv:integration-opensearch-v1] description = Run opensearch integration tests deps = - psycopg2-binary pytest<8.2.0 juju{env:LIBJUJU_VERSION_SPECIFIER:==3.6.1.0} pytest-operator<0.43 From d0941df43f85be1cc0164383f148affe7bd580c3 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Mon, 6 Oct 2025 15:10:08 +0200 Subject: [PATCH 20/34] =?UTF-8?q?fix:=20reviews=20from=20Ren=C3=A9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- lib/charms/data_platform_libs/v1/data_interfaces.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 01a73561..3a513e20 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -329,6 +329,7 @@ def _on_database_requested(self, event: ResourceRequestedEvent) -> None: "topic", "index", "plugin-url", + "prefix", ] SECRET_PREFIX = "secret-" From 09b32d374c23e02bf9993289c8256f73fdc2e3d8 Mon Sep 17 
00:00:00 2001 From: Neha Oudin Date: Wed, 8 Oct 2025 10:31:59 +0200 Subject: [PATCH 21/34] fix: handle secret removal events --- .../data_platform_libs/v1/data_interfaces.py | 31 ++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 3a513e20..4ab2d299 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -275,7 +275,7 @@ def _on_database_requested(self, event: ResourceRequestedEvent) -> None: SecretInfo, SecretNotFoundError, ) -from ops.charm import CharmEvents +from ops.charm import CharmEvents, SecretRemoveEvent from ops.framework import EventSource, Handle, Object from ops.model import Application, ModelError, Relation, Unit from pydantic import ( @@ -2039,6 +2039,7 @@ def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ""): charm.on.secret_changed, self._on_secret_changed_event, ) + self.framework.observe(charm.on.secret_remove, self._on_secret_remove_event) @property def relations(self) -> list[Relation]: @@ -2070,6 +2071,34 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: """Event emitted when the relation data has changed.""" raise NotImplementedError + def _on_secret_remove_event(self, event: SecretRemoveEvent) -> None: + """Event emitted when a secret is removed. + + A secret removal (entire removal, not just a revision removal) causes + https://github.com/juju/juju/issues/20794. This check is to avoid the + errors that would happen if we tried to remove the revision in that case + (in the revision removal, the label is present). 
+ """ + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app != self.charm.app: + logging.info("Secret removed event ignored for non Secret Owner") + return + + if relation.name != self.relation_name: + logging.info("Secret changed on wrong relation.") + return + + event.remove_revision() + @abstractmethod def _handle_event( self, From af460e375bcf033ea4b4ae7eb32316c84f152675 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Mon, 13 Oct 2025 10:36:53 +0200 Subject: [PATCH 22/34] fix: Remove ignore group + enforce resource immutability --- .../data_platform_libs/v1/data_interfaces.py | 67 +++++++++---------- 1 file changed, 32 insertions(+), 35 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 4ab2d299..a8e05db3 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -212,7 +212,7 @@ def __init__(self, *args): # Database generic helper self.database = DatabaseHelper() - def _on_database_requested(self, event: ResourceRequestedEvent) -> None: + def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: # Handle the event triggered by a new database requested in the relation # Retrieve the database name using the charm library. db_name = event.request.resource @@ -231,7 +231,7 @@ def _on_database_requested(self, event: ResourceRequestedEvent) -> None: self.provided_database.set_response(event.relation.id, response) ``` -As shown above, the library provides a custom event (database_requested) to handle +As shown above, the library provides a custom event (resource_requested) to handle the situation when an application charm requests a new database to be created. 
It's preferred to subscribe to this event instead of relation changed event to avoid creating a new database when other information other than a database name is @@ -247,7 +247,6 @@ def _on_database_requested(self, event: ResourceRequestedEvent) -> None: import random import string from abc import ABC, abstractmethod -from collections import namedtuple from datetime import datetime from enum import Enum from typing import ( @@ -256,6 +255,7 @@ def _on_database_requested(self, event: ResourceRequestedEvent) -> None: ClassVar, Generic, Literal, + NamedTuple, NewType, TypeAlias, TypeVar, @@ -399,13 +399,17 @@ def get_encoded_dict( logger.error("Unexpected datatype for %s instead of dict.", str(data)) -Diff = namedtuple("Diff", ["added", "changed", "deleted"]) -Diff.__doc__ = """ -A tuple for storing the diff between two data mappings. +class Diff(NamedTuple): + """A tuple for storing the diff between two data mappings. -added - keys that were added -changed - keys that still exist but have new values -deleted - key that were deleted""" + added - keys that were added + changed - keys that still exist but have new values + deleted - key that were deleted + """ + + added: set[str] + changed: set[str] + deleted: set[str] def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: @@ -443,7 +447,14 @@ def store_new_data( new_data: dict[str, str], short_uuid: str | None = None, ): - """Stores the new data in the databag for diff computation.""" + """Stores the new data in the databag for diff computation. + + Args: + relation: The relation considered to write data to + component: The component databag to write data to + new_data: a dictionary containing the data to write + short_uuid: Only present in V1, the request-id of that data to write. 
+ """ # First, the case for V0 if not short_uuid: relation.data[component].update({"data": json.dumps(new_data)}) @@ -452,9 +463,8 @@ def store_new_data( data = json.loads(relation.data[component].get("data", "{}")) if not isinstance(data, dict): raise ValueError - newest_data = copy.deepcopy(data) - newest_data[short_uuid] = new_data - relation.data[component].update({"data": json.dumps(newest_data)}) + data[short_uuid] = new_data + relation.data[component].update({"data": json.dumps(data)}) ############################################################################## @@ -1142,8 +1152,6 @@ class OpsRepository(AbstractRepository): SECRET_FIELD_NAME: str - IGNORES_GROUPS: list[SecretGroup] = [] - uri_to_databag: bool = True def __init__( @@ -1353,9 +1361,6 @@ def get_secret( if self.component not in self.relation.data: logger.info(f"Component {self.component} not in relation {self.relation}") return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) @@ -1368,7 +1373,7 @@ def get_secret_field( secret_group: SecretGroup, uri: str | None = None, short_uuid: str | None = None, - ) -> Any | None: + ) -> str | None: """Gets a value for a field stored in a secret group.""" if not self.relation: logger.info("No relation to get value from") @@ -1377,10 +1382,6 @@ def get_secret_field( logger.info(f"Component {self.component} not in relation {self.relation}") return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid secret group {secret_group}") - return None - secret_field = self.secret_field(secret_group, field) relation_data = self.relation.data[self.component] @@ -1424,9 +1425,6 @@ def add_secret( logger.info(f"Component {self.component} not in relation {self.relation}") return None - if secret_group in self.IGNORES_GROUPS: - logger.warning(f"Trying to get invalid 
secret group {secret_group}") - label = self._generate_secret_label(self.relation, secret_group, short_uuid) secret = self.secrets.add(label, {field: value}, self.relation) @@ -1473,13 +1471,6 @@ class OpsPeerRepository(OpsRepository): SECRET_FIELD_NAME = "internal_secret" - IGNORES_GROUPS = [ - SecretGroup("user"), - SecretGroup("entity"), - SecretGroup("mtls"), - SecretGroup("tls"), - ] - uri_to_databag: bool = False @property @@ -2212,7 +2203,13 @@ def _validate_diff(event: RelationEvent, _diff: Diff) -> None: if not isinstance(event, RelationChangedEvent): return - for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + for key in [ + "resource", + "entity-type", + "entity-permissions", + "extra-user-roles", + "extra-group-roles", + ]: if key in _diff.changed: raise ValueError(f"Cannot change {key} after relation has already been created") @@ -2355,7 +2352,7 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: app=relation.app, unit=remote_unit, request=request, - mtls_cert=old_mtls_cert, + old_mtls_cert=old_mtls_cert, ) @override From f92c04b4b8b1e8bb27cde02350abb3e8ad2f9758 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Mon, 13 Oct 2025 10:39:13 +0200 Subject: [PATCH 23/34] fix: Permission denied error --- lib/charms/data_platform_libs/v1/data_interfaces.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index a8e05db3..d386a057 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -321,6 +321,7 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: "not_leader": "this unit is not the leader", "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", "owner_no_refresh": "ERROR secret owner cannot use --refresh", + "permission_denied": "ERROR permission 
denied", } RESOURCE_ALIASES = [ @@ -513,7 +514,11 @@ class CachedSecret: The data structure is precisely reusing/simulating as in the actual Secret Storage """ - KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + KNOWN_MODEL_ERRORS = [ + MODEL_ERRORS["no_label_and_uri"], + MODEL_ERRORS["owner_no_refresh"], + MODEL_ERRORS["permission_denied"], + ] def __init__( self, From ddfe6fc96990a20b7ce59fa664022d755468ee71 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Tue, 14 Oct 2025 15:29:44 +0200 Subject: [PATCH 24/34] fix: annoying bugs --- .../data_platform_libs/v1/data_interfaces.py | 56 +++++++++++++------ 1 file changed, 38 insertions(+), 18 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index d386a057..0bb85fad 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -538,19 +538,29 @@ def __init__( @property def meta(self) -> Secret | None: """Getting cached secret meta-information.""" - if not self._secret_meta: - if not (self._secret_uri or self.label): - return + if self._secret_meta: + return self._secret_meta - try: - self._secret_meta = self._model.get_secret(label=self.label) - except SecretNotFoundError: - # Falling back to seeking for potential legacy labels - logger.info(f"Secret with label {self.label} not found") + if not (self._secret_uri or self.label): + return - # If still not found, to be checked by URI, to be labelled with the proposed label - if not self._secret_meta and self._secret_uri: + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + logger.info(f"Secret with label {self.label} not found") + except ModelError as err: + if not any(msg in str(err) for msg in self.KNOWN_MODEL_ERRORS): + raise + + # If still not found, to be checked by URI, to be 
labelled with the proposed label + if not self._secret_meta and self._secret_uri: + try: self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + except ModelError as err: + if not any(msg in str(err) for msg in self.KNOWN_MODEL_ERRORS): + raise + return self._secret_meta ########################################################################## @@ -868,6 +878,10 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ setattr(self, secret_field, secret.meta.id) continue + if secret and secret.meta: + # In case we lost the secret uri in the structure, let's add it back. + setattr(self, secret_field, secret.meta.id) + content = secret.get_content() full_content = copy.deepcopy(content) @@ -2320,14 +2334,17 @@ def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: ) return - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - if relation.name != self.relation_name: logging.info("Secret changed on wrong relation.") return + try: + event.secret.get_info() + logging.info("Secret changed event ignored for Secret Owner") + return + except SecretNotFoundError: + pass + remote_unit = self.get_remote_unit(relation) repository = OpsRelationRepository(self.model, relation, component=relation.app) @@ -2660,14 +2677,17 @@ def _on_secret_changed_event(self, event: SecretChangedEvent): ) return - if relation.app == self.charm.app: - logging.info("Secret changed event ignored for Secret Owner") - return - if relation.name != self.relation_name: logging.info("Secret changed on wrong relation.") return + try: + event.secret.get_info() + logging.info("Secret changed event ignored for Secret Owner") + return + except SecretNotFoundError: + pass + remote_unit = self.get_remote_unit(relation) response_model = self.interface.build_model(relation.id, component=relation.app) From 8e98c0dd54257db126a689a39e2e3a3ceed5c2a5 Mon Sep 17 00:00:00 2001 From: Neha Oudin 
Date: Wed, 15 Oct 2025 14:39:15 +0200 Subject: [PATCH 25/34] fix: final changes --- .../data_platform_libs/v1/data_interfaces.py | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 0bb85fad..334ddae7 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -239,6 +239,8 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: """ +from __future__ import annotations + import copy import hashlib import json @@ -296,7 +298,7 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: ) from pydantic.types import _SecretBase, _SecretField from pydantic_core import CoreSchema, core_schema -from typing_extensions import TypeAliasType, override +from typing_extensions import Self, TypeAliasType, override try: import psycopg2 @@ -548,7 +550,7 @@ def meta(self) -> Secret | None: self._secret_meta = self._model.get_secret(label=self.label) except SecretNotFoundError: # Falling back to seeking for potential legacy labels - logger.info(f"Secret with label {self.label} not found") + logger.debug(f"Secret with label {self.label} not found") except ModelError as err: if not any(msg in str(err) for msg in self.KNOWN_MODEL_ERRORS): raise @@ -794,6 +796,16 @@ class CommonModel(BaseModel): default_factory=gen_salt, ) + def update(self: Self, model: Self): + """Updates a common Model with another one.""" + # Iterate on all the fields that where explicitely set. + for item in model.model_fields_set: + # ignore the outstanding fields. 
+ if item not in ["salt", "request_id"]: + value = getattr(model, item) + setattr(self, item, value) + return self + @model_validator(mode="after") def extract_secrets(self, info: ValidationInfo): """Extract all secret_fields into their local field.""" @@ -1041,7 +1053,7 @@ class DataContractV1(BaseModel, Generic[TResourceProviderModel]): requests: list[TResourceProviderModel] = Field(default_factory=list) -DataContact = TypeAdapter(DataContractV1[ResourceProviderModel]) +DataContract = TypeAdapter(DataContractV1[ResourceProviderModel]) TCommonModel = TypeVar("TCommonModel", bound=CommonModel) @@ -2099,7 +2111,9 @@ def _on_secret_remove_event(self, event: SecretRemoveEvent) -> None: ) return - if relation.app != self.charm.app: + try: + event.secret.get_info() + except SecretNotFoundError: logging.info("Secret removed event ignored for non Secret Owner") return @@ -2448,7 +2462,7 @@ def set_response(self, relation_id: int, response: ResourceProviderModel): # This allows us to update or append easily. for index, _response in enumerate(model.requests): if _response.request_id == response.request_id: - model.requests[index] = response + model.requests[index].update(response) break else: model.requests.append(response) From 0406c23e615071b65d0f20c992c1747392e489f1 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Wed, 15 Oct 2025 14:40:17 +0200 Subject: [PATCH 26/34] fix: final changes --- lib/charms/data_platform_libs/v1/data_interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 334ddae7..ee44e4f8 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -798,7 +798,7 @@ class CommonModel(BaseModel): def update(self: Self, model: Self): """Updates a common Model with another one.""" - # Iterate on all the fields that where explicitely set. 
+ # Iterate on all the fields that where explicitly set. for item in model.model_fields_set: # ignore the outstanding fields. if item not in ["salt", "request_id"]: From 4d4183036f5e2b0be003c89710f87835adccc449 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Thu, 16 Oct 2025 15:50:51 +0200 Subject: [PATCH 27/34] fix: remove final + enforce secret id --- lib/charms/data_platform_libs/v1/data_interfaces.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index ee44e4f8..84eef8ef 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -261,7 +261,6 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: NewType, TypeAlias, TypeVar, - final, overload, ) @@ -890,7 +889,7 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ setattr(self, secret_field, secret.meta.id) continue - if secret and secret.meta: + if secret and secret.meta and secret.meta.id: # In case we lost the secret uri in the structure, let's add it back. 
setattr(self, secret_field, secret.meta.id) @@ -1472,7 +1471,6 @@ def delete_secret(self, label: str) -> None: self.secrets.remove(label) -@final class OpsRelationRepository(OpsRepository): """Implementation of the Abstract Repository for non peer relations.""" @@ -1531,7 +1529,6 @@ def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> s return f"{field}@{secret_group}" -@final class OpsPeerUnitRepository(OpsPeerRepository): """Implementation for a unit.""" @@ -1540,7 +1537,6 @@ def __init__(self, model: Model, relation: Relation | None, component: Unit): super().__init__(model, relation, component) -@final class OpsOtherPeerUnitRepository(OpsPeerRepository): """Implementation for a remote unit.""" From ee2423326913bcc0123294fe0e557a8752cccf1e Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Mon, 20 Oct 2025 18:24:47 +0200 Subject: [PATCH 28/34] fix: format --- .../data_platform_libs/v1/data_interfaces.py | 37 +++++++++---------- .../integration/database-charm/src/charm.py | 6 +-- .../dummy-database-charm/src/charm.py | 4 +- tests/v1/unit/test_data_interfaces.py | 8 ++-- 4 files changed, 27 insertions(+), 28 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 84eef8ef..739affb8 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -1584,17 +1584,16 @@ class RepositoryInterface(Generic[TRepository, TCommon]): def __init__( self, - charm: CharmBase, + model: Model, relation_name: str, component: Unit | Application, repository_type: type[TRepository], - model: type[TCommon] | TypeAdapter | None, + data_model: type[TCommon] | TypeAdapter | None, ): - self.charm = charm - self._model = charm.model + self._model = model self.repository_type = repository_type self.relation_name = relation_name - self.model = model + self.model = data_model self.component = component @property @@ -1677,11 
+1676,11 @@ class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, def __init__( self, - charm: CharmBase, + model: Model, relation_name: str, - model: type[TCommon] | TypeAdapter | None = None, + data_model: type[TCommon] | TypeAdapter | None = None, ): - super().__init__(charm, relation_name, charm.app, OpsRelationRepository, model) + super().__init__(model, relation_name, model.app, OpsRelationRepository, data_model) class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): @@ -1689,11 +1688,11 @@ class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCom def __init__( self, - charm: CharmBase, + model: Model, relation_name: str, - model: type[TPeerCommon] | TypeAdapter | None = None, + data_model: type[TPeerCommon] | TypeAdapter | None = None, ): - super().__init__(charm, relation_name, charm.app, OpsPeerRepository, model) + super().__init__(model, relation_name, model.app, OpsPeerRepository, data_model) class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): @@ -1701,11 +1700,11 @@ class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, def __init__( self, - charm: CharmBase, + model: Model, relation_name: str, - model: type[TPeerCommon] | TypeAdapter | None = None, + data_model: type[TPeerCommon] | TypeAdapter | None = None, ): - super().__init__(charm, relation_name, charm.unit, OpsPeerUnitRepository, model) + super().__init__(model, relation_name, model.unit, OpsPeerUnitRepository, data_model) class OpsOtherPeerUnitRepositoryInterface( @@ -1715,12 +1714,12 @@ class OpsOtherPeerUnitRepositoryInterface( def __init__( self, - charm: CharmBase, + model: Model, relation_name: str, unit: Unit, - model: type[TPeerCommon] | TypeAdapter | None = None, + data_model: type[TPeerCommon] | TypeAdapter | None = None, ): - super().__init__(charm, relation_name, unit, OpsOtherPeerUnitRepository, model) + super().__init__(model, 
relation_name, unit, OpsOtherPeerUnitRepository, data_model) ############################################################################## @@ -2217,7 +2216,7 @@ def __init__( super().__init__(charm, relation_name, unique_key) self.component = self.charm.app self.request_model = request_model - self.interface = OpsRelationRepositoryInterface(charm, relation_name, request_model) + self.interface = OpsRelationRepositoryInterface(charm.model, relation_name, request_model) self.mtls_enabled = mtls_enabled self.bulk_event = bulk_event @@ -2487,7 +2486,7 @@ def __init__( self._requests = requests self.response_model = DataContractV1[response_model] self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( - OpsRelationRepositoryInterface(charm, relation_name, self.response_model) + OpsRelationRepositoryInterface(charm.model, relation_name, self.response_model) ) if requests: diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index b34872a8..1b8a2117 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -85,10 +85,10 @@ def __init__(self, *args): self._servers_data = {} self._peer_relation_app = OpsPeerRepositoryInterface( - self, relation_name=PEER, model=PeerAppModel + self.model, relation_name=PEER, data_model=PeerAppModel ) self._peer_relation_unit = OpsPeerUnitRepositoryInterface( - self, relation_name=PEER, model=PeerUnitModel + self.model, relation_name=PEER, data_model=PeerUnitModel ) # Default charm events. 
@@ -155,7 +155,7 @@ def peer_units_data_interfaces(self) -> dict[Unit, OpsOtherPeerUnitRepositoryInt for unit in self.peer_relation.units: if unit not in self._servers_data: self._servers_data[unit] = OpsOtherPeerUnitRepositoryInterface( - charm=self, relation_name=PEER, unit=unit, model=PeerUnitModel + model=self.model, relation_name=PEER, unit=unit, data_model=PeerUnitModel ) return self._servers_data diff --git a/tests/v1/integration/dummy-database-charm/src/charm.py b/tests/v1/integration/dummy-database-charm/src/charm.py index 05c70362..231a5590 100755 --- a/tests/v1/integration/dummy-database-charm/src/charm.py +++ b/tests/v1/integration/dummy-database-charm/src/charm.py @@ -67,11 +67,11 @@ def __init__(self, *args): self._servers_data = {} self.peer_relation_app = OpsPeerRepositoryInterface( - self, relation_name=PEER, model=PeerAppModel + self.model, relation_name=PEER, data_model=PeerAppModel ) self.peer_relation_unit = OpsPeerUnitRepositoryInterface( - self, relation_name=PEER, model=PeerAppModel + self.model, relation_name=PEER, data_model=PeerAppModel ) self.database = ResourceProviderEventHandler(self, "database", RequirerCommonModel) diff --git a/tests/v1/unit/test_data_interfaces.py b/tests/v1/unit/test_data_interfaces.py index e65d54b7..07aee16d 100644 --- a/tests/v1/unit/test_data_interfaces.py +++ b/tests/v1/unit/test_data_interfaces.py @@ -138,10 +138,10 @@ class DatabaseCharm(CharmBase): def __init__(self, *args): super().__init__(*args) self.peer_relation_app = OpsPeerRepositoryInterface( - self, PEER_RELATION_NAME, model=PeerAppModel + self.model, PEER_RELATION_NAME, data_model=PeerAppModel ) self.peer_relation_unit = OpsPeerUnitRepositoryInterface( - self, PEER_RELATION_NAME, model=PeerUnitModel + self.model, PEER_RELATION_NAME, data_model=PeerUnitModel ) self.provider = ResourceProviderEventHandler( self, DATABASE_RELATION_NAME, RequirerCommonModel @@ -194,10 +194,10 @@ class DatabaseCharmDynamicSecrets(CharmBase): def __init__(self, *args): 
super().__init__(*args) self.peer_relation_app = OpsPeerRepositoryInterface( - self, PEER_RELATION_NAME, model=PeerAppModel + self.model, PEER_RELATION_NAME, data_model=PeerAppModel ) self.peer_relation_unit = OpsPeerUnitRepositoryInterface( - self, PEER_RELATION_NAME, model=PeerUnitModel + self.model, PEER_RELATION_NAME, data_model=PeerUnitModel ) @property From 4f19cfbfb0535014ef4c197f92f2e47cb5da612a Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Tue, 21 Oct 2025 15:52:17 +0200 Subject: [PATCH 29/34] fix: Remove SecretStr, SecretBool and refacto * SecretStr and SecretBool where nice to prevent errors but removed the abstraction of secrets in the public interface. * Refacto the models to extract the serialization of the secrets in a specific class and allow to have custom classes depending on this base common model --- .../data_platform_libs/v1/data_interfaces.py | 108 +++++++++--------- .../application-charm/src/charm.py | 7 +- .../integration/database-charm/src/charm.py | 18 +-- .../dummy-database-charm/src/charm.py | 3 - tests/v1/integration/kafka-charm/src/charm.py | 23 ++-- .../kafka-connect-charm/src/charm.py | 12 +- .../integration/opensearch-charm/src/charm.py | 16 ++- tests/v1/unit/test_data_interfaces.py | 30 ++--- 8 files changed, 99 insertions(+), 118 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 739affb8..3befac7f 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -254,7 +254,6 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: from typing import ( Annotated, Any, - ClassVar, Generic, Literal, NamedTuple, @@ -286,7 +285,6 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: ConfigDict, Discriminator, Field, - SecretStr, SerializationInfo, SerializerFunctionWrapHandler, Tag, @@ -295,8 +293,6 @@ def _on_resource_requested(self, event: 
ResourceRequestedEvent) -> None: model_serializer, model_validator, ) -from pydantic.types import _SecretBase, _SecretField -from pydantic_core import CoreSchema, core_schema from typing_extensions import Self, TypeAliasType, override try: @@ -479,18 +475,8 @@ def store_new_data( SecretString = TypeAliasType("SecretString", Annotated[str, Field(pattern="secret:.*")]) -class SecretBool(_SecretField[bool]): - """Class for booleans as secrets.""" - - _inner_schema: ClassVar[CoreSchema] = core_schema.bool_schema() - _error_kind: ClassVar[str] = "bool_type" - - def _display(self) -> str: - return "****" - - -OptionalSecretStr: TypeAlias = SecretStr | None -OptionalSecretBool: TypeAlias = SecretBool | None +OptionalSecretStr: TypeAlias = str | None +OptionalSecretBool: TypeAlias = bool | None OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) @@ -716,11 +702,8 @@ def extract_secrets(self, info: ValidationInfo): continue value = secret.get_content().get(aliased_field) - if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - value = SecretStr(value) + value = json.loads(value) setattr(self, field, value) return self @@ -744,17 +727,14 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ value = getattr(self, field) - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) + if value and not isinstance(value, str): + value = json.dumps(value) if secret is None: if value: secret = repository.add_secret( aliased_field, - actual_value, + value, secret_group, ) if not secret or not secret.meta: @@ -767,17 +747,13 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ if value is None: full_content.pop(aliased_field, None) else: - full_content.update({aliased_field: actual_value}) + full_content.update({aliased_field: value}) 
secret.set_content(full_content) return handler(self) -class CommonModel(BaseModel): - """Common Model for both requirer and provider. - - request_id stores the request identifier for easier access. - resource is the requested resource. - """ +class BaseCommonModel(BaseModel): + """Embeds the logic of parsing and serializing.""" model_config = ConfigDict( validate_by_name=True, @@ -788,13 +764,6 @@ class CommonModel(BaseModel): extra="allow", ) - resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") - request_id: str | None = Field(default=None) - salt: str = Field( - description="This salt is used to create unique hashes even when other fields map 1-1", - default_factory=gen_salt, - ) - def update(self: Self, model: Self): """Updates a common Model with another one.""" # Iterate on all the fields that where explicitly set. @@ -812,7 +781,7 @@ def extract_secrets(self, info: ValidationInfo): logger.debug("No secret parsing as we're lacking context here.") return self repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) + short_uuid = self.short_uuid for field, field_info in self.__pydantic_fields__.items(): if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: secret_group = field_info.metadata[0] @@ -837,22 +806,23 @@ def extract_secrets(self, info: ValidationInfo): continue value = secret.get_content().get(aliased_field) + if value and field_info.annotation == OptionalSecretBool: - value = SecretBool(json.loads(value)) - elif value: - value = SecretStr(value) + value = json.loads(value) setattr(self, field, value) + return self @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): # noqa: C901 """Serializes the model writing the secrets in their respective secrets.""" 
if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): logger.debug("No secret parsing serialization as we're lacking context here.") return handler(self) repository: AbstractRepository = info.context.get("repository") - short_uuid = self.request_id or gen_hash(self.resource, self.salt) + + short_uuid = self.short_uuid # Backward compatibility for v0 regarding secrets. if info.context.get("version") == "v0": short_uuid = None @@ -873,16 +843,13 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ value = getattr(self, field) - actual_value = ( - value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value - ) - if not isinstance(actual_value, str): - actual_value = json.dumps(actual_value) + if value and not isinstance(value, str): + value = json.dumps(value) if secret is None: if value: secret = repository.add_secret( - aliased_field, actual_value, secret_group, short_uuid + aliased_field, value, secret_group, short_uuid ) if not secret or not secret.meta: raise SecretError("No secret to send back") @@ -899,7 +866,7 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ if value is None: full_content.pop(aliased_field, None) else: - full_content.update({aliased_field: actual_value}) + full_content.update({aliased_field: value}) secret.set_content(full_content) if not full_content: @@ -921,6 +888,41 @@ def _get_secret_field(cls, field: str) -> SecretGroup | None: return SecretGroup(value) return None + @property + def short_uuid(self) -> str | None: + """The request id.""" + return None + + +class CommonModel(BaseCommonModel): + """Common Model for both requirer and provider. + + request_id stores the request identifier for easier access. + salt is used to create a valid request id. + resource is the requested resource. 
+ """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace("_", "-"), + extra="allow", + ) + + resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default="") + request_id: str | None = Field(default=None) + salt: str = Field( + description="This salt is used to create unique hashes even when other fields map 1-1", + default_factory=gen_salt, + ) + + @property + def short_uuid(self) -> str | None: + """The request id.""" + return self.request_id or gen_hash(self.resource, self.salt) + class EntityPermissionModel(BaseModel): """Entity Permissions Model.""" diff --git a/tests/v1/integration/application-charm/src/charm.py b/tests/v1/integration/application-charm/src/charm.py index 62b9662b..c8f21b9c 100755 --- a/tests/v1/integration/application-charm/src/charm.py +++ b/tests/v1/integration/application-charm/src/charm.py @@ -15,8 +15,7 @@ from ops.charm import ActionEvent, CharmBase from ops.main import main from ops.model import ActiveStatus -from pydantic import Field, SecretStr -from pydantic.types import _SecretBase +from pydantic import Field from charms.data_platform_libs.v1.data_interfaces import ( ExtraSecretStr, @@ -335,7 +334,6 @@ def _on_get_relation_field(self, event: ActionEvent): model = source.interface.build_model(relation.id, component=relation.app) for request in model.requests: value = getattr(request, event.params["field"].replace("-", "_")) - value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_get_relation_self_side_field(self, event: ActionEvent): @@ -347,7 +345,6 @@ def _on_get_relation_self_side_field(self, event: ActionEvent): ) for request in model.requests: value = getattr(request, event.params["field"].replace("-", "_")) - value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value 
event.set_results({"value": value if value else ""}) def _on_set_relation_field(self, event: ActionEvent): @@ -507,7 +504,7 @@ def _on_set_mtls_cert(self, event: ActionEvent): relation.id, RequirerDataContractV1[KafkaRequestModel], component=self.app ) for response in model.requests: - response.mtls_cert = SecretStr(cert) + response.mtls_cert = cert self.kafka_split_pattern.interface.write_model(relation.id, model) event.set_results({"mtls-cert": cert}) diff --git a/tests/v1/integration/database-charm/src/charm.py b/tests/v1/integration/database-charm/src/charm.py index 1b8a2117..810d0bf2 100755 --- a/tests/v1/integration/database-charm/src/charm.py +++ b/tests/v1/integration/database-charm/src/charm.py @@ -21,8 +21,7 @@ from ops.framework import StoredState from ops.main import main from ops.model import ActiveStatus, MaintenanceStatus -from pydantic import Field, SecretStr -from pydantic.types import _SecretBase +from pydantic import Field from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, @@ -37,7 +36,6 @@ ResourceProviderEventHandler, ResourceProviderModel, ResourceRequestedEvent, - SecretBool, ) logger = logging.getLogger(__name__) @@ -257,10 +255,10 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: response = ResourceProviderModel( request_id=request.request_id, resource=resource, - password=SecretStr(password), - username=SecretStr(username), + password=password, + username=username, endpoints=f"{self.model.get_binding('database').network.bind_address}:5432", - tls=SecretBool(False), + tls=False, version=version, ) self.database.set_response(event.relation.id, response) @@ -302,8 +300,8 @@ def _on_resource_entity_requested(self, event: ResourceEntityRequestedEvent) -> response = ResourceProviderModel( request_id=request.request_id, salt=request.salt, - entity_name=SecretStr(rolename), - entity_password=SecretStr(password), + entity_name=rolename, + entity_password=password, ) 
self.database.set_response(event.relation.id, response) self.unit.status = ActiveStatus() @@ -324,7 +322,6 @@ def _on_get_relation_field(self, event: ActionEvent): ) for request in model.requests: value = getattr(request, event.params["field"].replace("-", "_")) - value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_get_relation_self_side_field(self, event: ActionEvent): @@ -334,7 +331,6 @@ def _on_get_relation_self_side_field(self, event: ActionEvent): model = self.database.interface.build_model(relation.id, DataContract) for request in model.requests: value = getattr(request, event.params["field"].replace("-", "_")) - value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_set_relation_field(self, event: ActionEvent): @@ -387,7 +383,6 @@ def _on_get_peer_relation_field(self, event: ActionEvent): relation = self._peer_relation_unit.relations[0] model = self._peer_relation_unit.build_model(relation.id) value = getattr(model, event.params["field"].replace("-", "_")) - value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) def _on_set_peer_relation_field(self, event: ActionEvent): @@ -466,7 +461,6 @@ def _on_get_other_peer_relation_field(self, event: ActionEvent): model, event.params["field"].replace("-", "_") ) for key, item in value.items(): - item = item.get_secret_value() if issubclass(item.__class__, _SecretBase) else item value[key] = item event.set_results(value) diff --git a/tests/v1/integration/dummy-database-charm/src/charm.py b/tests/v1/integration/dummy-database-charm/src/charm.py index 231a5590..92ba0b64 100755 --- a/tests/v1/integration/dummy-database-charm/src/charm.py +++ b/tests/v1/integration/dummy-database-charm/src/charm.py @@ -18,7 +18,6 @@ from ops.main import main from 
ops.model import ActiveStatus from pydantic import Field -from pydantic.types import _SecretBase from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, @@ -169,7 +168,6 @@ def _on_get_peer_secret(self, event: ActionEvent): repository = self.peer_relation_unit.repository(relation_bis.id) result = repository.get_secret_field(event.params["field"], event.params["group"]) - result = result.get_secret_value() if issubclass(result.__class__, _SecretBase) else result event.set_results({event.params["field"]: result if result else ""}) def _on_set_peer_secret(self, event: ActionEvent): @@ -224,7 +222,6 @@ def _on_get_peer_relation_field(self, event: ActionEvent): relation = self.peer_relation_unit.relations[0] model = self.peer_relation_unit.build_model(relation.id) value = getattr(model, event.params["field"].replace("-", "_")) - value = value.get_secret_value() if issubclass(value.__class__, _SecretBase) else value event.set_results({"value": value if value else ""}) diff --git a/tests/v1/integration/kafka-charm/src/charm.py b/tests/v1/integration/kafka-charm/src/charm.py index 55c110fb..0acba288 100755 --- a/tests/v1/integration/kafka-charm/src/charm.py +++ b/tests/v1/integration/kafka-charm/src/charm.py @@ -14,7 +14,6 @@ from ops.charm import ActionEvent, CharmBase from ops.main import main from ops.model import ActiveStatus, MaintenanceStatus -from pydantic import SecretStr from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, @@ -24,7 +23,6 @@ ResourceEntityRequestedEvent, ResourceProviderEventHandler, ResourceRequestedEvent, - SecretBool, ) logger = logging.getLogger(__name__) @@ -107,9 +105,6 @@ def _on_topic_requested(self, event: ResourceRequestedEvent[KafkaRequestModel]): topic = event.request.resource consumer_group_prefix = event.request.consumer_group_prefix - if consumer_group_prefix is not None: - consumer_group_prefix = SecretStr(consumer_group_prefix) - relation_id = event.relation.id username = "admin" @@ -122,13 
+117,13 @@ def _on_topic_requested(self, event: ResourceRequestedEvent[KafkaRequestModel]): response = KafkaResponseModel( salt=event.request.salt, request_id=event.request.request_id, - username=SecretStr(username), - password=SecretStr(password), + username=username, + password=password, endpoints=bootstrap_server, consumer_group_prefix=consumer_group_prefix, - tls=SecretBool(True), - tls_ca=SecretStr("Canonical"), - zookeeper_uris=SecretStr("protocol.z1:port/,protocol.z2:port/"), + tls=True, + tls_ca="Canonical", + zookeeper_uris="protocol.z1:port/,protocol.z2:port/", resource=topic, ) self.kafka_provider.set_response(relation_id, response) @@ -145,8 +140,8 @@ def _on_topic_entity_requested(self, event: ResourceEntityRequestedEvent): response = KafkaResponseModel( request_id=event.request.request_id, salt=event.request.salt, - entity_name=SecretStr(rolename), - entity_password=SecretStr(password), + entity_name=rolename, + entity_password=password, ) # set connection info in the databag relation self.kafka_provider.set_response(event.relation.id, response) @@ -170,7 +165,7 @@ def _on_sync_password(self, event: ActionEvent): relation.id, DataContractV1[KafkaResponseModel] ) for request in model.requests: - request.password = SecretStr(password) + request.password = password self.kafka_provider.interface.write_model(relation.id, model) event.set_results({"password": self.get_secret("app", "password")}) @@ -222,7 +217,7 @@ def _on_mtls_cert_updated(self, event: MtlsCertUpdatedEvent): if not mtls_cert: return - open("client-cert.pem", "w").write(mtls_cert.get_secret_value()) + open("client-cert.pem", "w").write(mtls_cert) self.unit.status = ActiveStatus(f"{os.getcwd()}/client-cert.pem") diff --git a/tests/v1/integration/kafka-connect-charm/src/charm.py b/tests/v1/integration/kafka-connect-charm/src/charm.py index 94eb8c24..688cdb4f 100755 --- a/tests/v1/integration/kafka-connect-charm/src/charm.py +++ b/tests/v1/integration/kafka-connect-charm/src/charm.py @@ -13,7 
+13,6 @@ from ops.charm import ActionEvent, CharmBase from ops.main import main from ops.model import ActiveStatus, MaintenanceStatus -from pydantic import SecretStr from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, @@ -21,7 +20,6 @@ ResourceProviderEventHandler, ResourceProviderModel, ResourceRequestedEvent, - SecretBool, ) logger = logging.getLogger(__name__) @@ -113,10 +111,10 @@ def _on_integration_requested(self, event: ResourceRequestedEvent): salt=event.request.salt, request_id=event.request.request_id, endpoints=endpoints, - username=SecretStr(username), - password=SecretStr(password), - tls=SecretBool(False), - tls_ca=SecretStr("disabled"), + username=username, + password=password, + tls=False, + tls_ca="disabled", ) self.provider.set_response(relation_id, response) self.unit.status = ActiveStatus( @@ -150,7 +148,7 @@ def _update_clients_data(self, key: str, value: str) -> None: ) for request in model.requests: if key in ("username", "password"): - setattr(request, key, SecretStr(self.get_secret("app", key))) + setattr(request, key, self.get_secret("app", key)) else: setattr(request, key, value) self.provider.interface.write_model(relation.id, model) diff --git a/tests/v1/integration/opensearch-charm/src/charm.py b/tests/v1/integration/opensearch-charm/src/charm.py index 43b0ea9b..7a6c5f9d 100755 --- a/tests/v1/integration/opensearch-charm/src/charm.py +++ b/tests/v1/integration/opensearch-charm/src/charm.py @@ -14,7 +14,6 @@ from ops.charm import ActionEvent, CharmBase from ops.main import main from ops.model import ActiveStatus, MaintenanceStatus -from pydantic import SecretStr from charms.data_platform_libs.v1.data_interfaces import ( DataContractV1, @@ -23,7 +22,6 @@ ResourceProviderEventHandler, ResourceProviderModel, ResourceRequestedEvent, - SecretBool, ) logger = logging.getLogger(__name__) @@ -97,7 +95,7 @@ def _on_change_admin_password(self, event: ActionEvent): relation.id, DataContractV1[ResourceProviderModel] ) for 
request in model.requests: - request.password = SecretStr(password) + request.password = password self.opensearch_provider.interface.write_model(relation.id, model) def _on_index_requested(self, event: ResourceRequestedEvent[RequirerCommonModel]): @@ -119,11 +117,11 @@ def _on_index_requested(self, event: ResourceRequestedEvent[RequirerCommonModel] salt=event.request.salt, request_id=event.request.request_id, resource=index, - username=SecretStr(username), - password=SecretStr(password), - tls_ca=SecretStr("Canonical"), + username=username, + password=password, + tls_ca="Canonical", endpoints=endpoints, - tls=SecretBool(True), + tls=True, ) self.opensearch_provider.set_response(relation_id, response) self.unit.status = ActiveStatus(f"index: {index} granted!") @@ -139,8 +137,8 @@ def _on_index_entity_requested(self, event: ResourceEntityRequestedEvent): response = ResourceProviderModel( salt=event.request.salt, request_id=event.request.request_id, - entity_name=SecretStr(rolename), - entity_password=SecretStr(password), + entity_name=rolename, + entity_password=password, ) # set connection info in the databag relation self.opensearch_provider.set_response(event.relation.id, response) diff --git a/tests/v1/unit/test_data_interfaces.py b/tests/v1/unit/test_data_interfaces.py index 07aee16d..783e404d 100644 --- a/tests/v1/unit/test_data_interfaces.py +++ b/tests/v1/unit/test_data_interfaces.py @@ -14,7 +14,7 @@ from ops.model import Relation, Unit from ops.testing import Harness from parameterized import parameterized -from pydantic import Field, SecretStr, TypeAdapter, ValidationError +from pydantic import Field, TypeAdapter, ValidationError from charms.data_platform_libs.v0.data_interfaces import ( PROV_SECRET_PREFIX, @@ -302,8 +302,8 @@ def test_set_credentials_secrets(self): salt="kkkkkkkk", request_id="c759221a6c14c72a", resource=DATABASE, - username=SecretStr("test-username"), - password=SecretStr("test-password"), + username="test-username", + 
password="test-password", ) # Set the credentials in the relation using the provides charm library. @@ -357,8 +357,8 @@ def test_set_entity_credentials(self): salt="kkkkkkkk", request_id="c759221a6c14c72a", resource=DATABASE, - entity_name=SecretStr("test-name"), # pyright: ignore[reportCallIssue] - entity_password=SecretStr("test-password"), # pyright: ignore[reportCallIssue] + entity_name="test-name", # pyright: ignore[reportCallIssue] + entity_password="test-password", # pyright: ignore[reportCallIssue] ) # Set the credentials in the relation using the provides charm library. @@ -1255,8 +1255,8 @@ def test_on_resource_created_secrets(self, _on_resource_created): # using the requires charm library event. event = _on_resource_created.call_args[0][0] assert event.response.secret_user == secret.id - assert event.response.username.get_secret_value() == "test-username" - assert event.response.password.get_secret_value() == "test-password" + assert event.response.username == "test-username" + assert event.response.password == "test-password" assert self.harness.charm.requirer.is_resource_created( self.rel_id, event.response.request_id @@ -1301,8 +1301,8 @@ def test_on_resource_created_secrets(self, _on_resource_created): # using the requires charm library event. event = _on_resource_created.call_args[0][0] assert event.response.secret_user == secret2.id - assert event.response.username.get_secret_value() == "test-username-2" - assert event.response.password.get_secret_value() == "test-password-2" + assert event.response.username == "test-username-2" + assert event.response.password == "test-password-2" assert self.harness.charm.requirer.is_resource_created(rel_id, event.response.request_id) assert self.harness.charm.requirer.are_all_resources_created(rel_id) @@ -1347,8 +1347,8 @@ def test_on_resource_entity_created_secrets(self, _on_resource_entity_created): # Check that the entity-type, entity-name and entity-password are present in the relation. 
event = _on_resource_entity_created.call_args[0][0] assert event.response.secret_entity == secret.id - assert event.response.entity_name.get_secret_value() == "test-username" - assert event.response.entity_password.get_secret_value() == "test-password" + assert event.response.entity_name == "test-username" + assert event.response.entity_password == "test-password" # Reset the mock call count. _on_resource_entity_created.reset_mock() @@ -1388,7 +1388,7 @@ def test_on_resource_entity_created_secrets(self, _on_resource_entity_created): # Check that the entity-type and entity-name are present in the relation. event = _on_resource_entity_created.call_args[0][0] assert event.response.secret_entity == secret2.id - assert event.response.entity_name.get_secret_value() == "test-groupname" + assert event.response.entity_name == "test-groupname" assert event.response.entity_password is None def test_fetch_relation_data_secrets_fields(self): @@ -1695,9 +1695,9 @@ def test_additional_fields_are_accessible(self, _on_resource_created): # Check that the fields are present in the relation # using the requires charm library. 
- assert event.response.tls.get_secret_value() is True - assert event.response.tls_ca.get_secret_value() == "deadbeef" - assert event.response.uris.get_secret_value() == "host1:port,host2:port" + assert event.response.tls is True + assert event.response.tls_ca == "deadbeef" + assert event.response.uris == "host1:port,host2:port" assert event.response.version == "1.0" def test_assign_relation_alias(self): From 05469079575b47204f594917d7a5000b0e632bcc Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Tue, 21 Oct 2025 16:30:24 +0200 Subject: [PATCH 30/34] fix: linting --- lib/charms/data_platform_libs/v1/data_interfaces.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 3befac7f..5aa3ae4a 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -815,7 +815,9 @@ def extract_secrets(self, info: ValidationInfo): return self @model_serializer(mode="wrap") - def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): # noqa: C901 + def serialize_model( + self, handler: SerializerFunctionWrapHandler, info: SerializationInfo + ): # noqa: C901 """Serializes the model writing the secrets in their respective secrets.""" if not info.context or not isinstance(info.context.get("repository"), AbstractRepository): logger.debug("No secret parsing serialization as we're lacking context here.") @@ -2606,8 +2608,8 @@ def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> for request in model.requests: if request.endpoints and request.username and request.password: host = request.endpoints.split(":")[0] - username = request.username.get_secret_value() - password = request.password.get_secret_value() + username = request.username + password = request.password connection_string = f"host='{host}' dbname='{request.resource}' 
user='{username}' password='{password}'" return self._is_pg_plugin_enabled(plugin, connection_string) From 24042be10a6fbb7bc312171720998d10cfc823bb Mon Sep 17 00:00:00 2001 From: Neha Oudin <17551419+Gu1nness@users.noreply.github.com> Date: Wed, 22 Oct 2025 07:36:11 +0000 Subject: [PATCH 31/34] Update lib/charms/data_platform_libs/v1/data_interfaces.py Co-authored-by: Smail KOURTA --- lib/charms/data_platform_libs/v1/data_interfaces.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 5aa3ae4a..cb866397 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -224,8 +224,8 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: salt=event.request.salt, request_id=event.request.request_id, resource=db_name, - username=SecretStr(username), - password=SecretStr(password), + username=username, + password=password, ... 
) + self.provided_database.set_response(event.relation.id, response) From 4d64db8317e2bed164742adb48a195275cda90ff Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Thu, 23 Oct 2025 14:29:51 +0200 Subject: [PATCH 32/34] fix: helpers for etcd --- .../data_platform_libs/v1/data_interfaces.py | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index cb866397..06ee9dd5 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -249,6 +249,7 @@ def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: import random import string from abc import ABC, abstractmethod +from collections.abc import Sequence from datetime import datetime from enum import Enum from typing import ( @@ -2469,6 +2470,93 @@ def set_response(self, relation_id: int, response: ResourceProviderModel): self.interface.write_model(relation_id, model) return + def set_responses(self, relation_id: int, responses: list[ResourceProviderModel]) -> None: + r"""Sets a list of responses in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. + If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! + + Args: + relation_id: The specific relation id for that event. + responses: The responses to write in the databag.
+ """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + assert len(responses) >= 1, "List of responses is empty" + + if not relation: + raise ValueError("Missing relation.") + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field("version") or "v0" + + if version == "v0": + assert len(responses) == 1, "V0 only expects one response" + # Ensure the request_id is None + response = responses[0] + response.request_id = None + self.interface.write_model( + relation_id, response, context={"version": "v0"} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[responses[0].__class__]) + + response_map: dict[str, ResourceProviderModel] = {response.request_id: response for response in responses if response.request_id} + + # Update all the already existing keys + for index, _response in enumerate(model.requests): + assert _response.request_id, "Missing request id in the response" + response = response_map.get(_response.request_id) + if response: + model.requests[index].update(response) + del response_map[_response.request_id] + + # Add the missing keys + model.requests += list(response_map.values()) + + self.interface.write_model(relation_id, model) + return + + def requests(self, relation: Relation) -> Sequence[RequirerCommonModel]: + """Returns the list of requests that we got.""" + repository = OpsRelationRepository( + self.model, relation, component=relation.app + ) + + # Don't do anything until we get some data + if not repository.get_data(): + return [] + + version = repository.get_field("version") or "v0" + if version == "v0": + request_model = build_model(repository, RequirerDataContractV0) + request_model.request_id = None # For safety, let's ensure that we don't have a request id.
+ return [request_model] + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + return request_model.requests + + def responses(self, relation: Relation, model: type[ResourceProviderModel]) -> list[ResourceProviderModel]: + """Returns the list of responses that we currently have.""" + repository = self.interface.repository(relation.id, component=relation.app) + + version = repository.get_field("version") or "v0" + if version == "v0": + # Ensure the request_id is None + return [self.interface.build_model(relation.id, DataContractV0)] + + return self.interface.build_model(relation.id, DataContractV1[model]).requests + + + + class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): """Event Handler for resource requirer.""" From 9abdd8c3642cd3a7bd55f155006546e8b26dc97c Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Thu, 23 Oct 2025 14:54:46 +0200 Subject: [PATCH 33/34] fix: dict-like access to the models --- .../data_platform_libs/v1/data_interfaces.py | 36 +++++++++++++++++++ tests/v1/unit/test_data_interfaces.py | 2 ++ 2 files changed, 38 insertions(+) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index 06ee9dd5..db47b79c 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -752,6 +752,24 @@ def serialize_model(self, handler: SerializerFunctionWrapHandler, info: Serializ secret.set_content(full_content) return handler(self) + def __getitem__(self, key): + """Dict like access to the model.""" + try: + return getattr(self, key.replace("-", "_")) + except Exception: + raise KeyError(f"{key} is not present in the model") + + def __setitem__(self, key, value): + """Dict like setter for the model.""" + return setattr(self, key.replace("-", "_"), value) + + def __delitem__(self, key): + """Dict like deleter for the model.""" + try: + return delattr(self, 
key.replace("-", "_")) + except Exception: + raise KeyError(f"{key} is not present in the model.") + class BaseCommonModel(BaseModel): """Embeds the logic of parsing and serializing.""" @@ -896,6 +914,24 @@ def short_uuid(self) -> str | None: """The request id.""" return None + def __getitem__(self, key): + """Dict like access to the model.""" + try: + return getattr(self, key.replace("-", "_")) + except Exception: + raise KeyError(f"{key} is not present in the model") + + def __setitem__(self, key, value): + """Dict like setter for the model.""" + return setattr(self, key.replace("-", "_"), value) + + def __delitem__(self, key): + """Dict like deleter for the model.""" + try: + return delattr(self, key.replace("-", "_")) + except Exception: + raise KeyError(f"{key} is not present in the model.") + class CommonModel(BaseCommonModel): """Common Model for both requirer and provider. diff --git a/tests/v1/unit/test_data_interfaces.py b/tests/v1/unit/test_data_interfaces.py index 783e404d..30f49ef3 100644 --- a/tests/v1/unit/test_data_interfaces.py +++ b/tests/v1/unit/test_data_interfaces.py @@ -306,6 +306,8 @@ def test_set_credentials_secrets(self): password="test-password", ) + assert response["request-id"] == response.request_id + # Set the credentials in the relation using the provides charm library. 
self.harness.charm.provider.set_response(self.rel_id, response) From a6b2d25613db7e2a2116f5ff13f140f155fa1774 Mon Sep 17 00:00:00 2001 From: Neha Oudin Date: Thu, 23 Oct 2025 15:04:59 +0200 Subject: [PATCH 34/34] fix: lint --- .../data_platform_libs/v1/data_interfaces.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/lib/charms/data_platform_libs/v1/data_interfaces.py b/lib/charms/data_platform_libs/v1/data_interfaces.py index db47b79c..251a19d0 100644 --- a/lib/charms/data_platform_libs/v1/data_interfaces.py +++ b/lib/charms/data_platform_libs/v1/data_interfaces.py @@ -2544,7 +2544,9 @@ def set_responses(self, relation_id: int, responses: list[ResourceProviderModel] model = self.interface.build_model(relation_id, DataContractV1[responses[0].__class__]) - response_map: dict[str, ResourceProviderModel] = {response.request_id: response for response in responses if response.request_id} + response_map: dict[str, ResourceProviderModel] = { + response.request_id: response for response in responses if response.request_id + } # Update all the already existing keys for index, _response in enumerate(model.requests): @@ -2562,9 +2564,7 @@ def set_responses(self, relation_id: int, responses: list[ResourceProviderModel] def requests(self, relation: Relation) -> Sequence[RequirerCommonModel]: """Returns the list of requests that we got.""" - repository = OpsRelationRepository( - self.model, relation, component=relation.app - ) + repository = OpsRelationRepository(self.model, relation, component=relation.app) # Don't do anything until we get some data if not repository.get_data(): @@ -2579,7 +2579,9 @@ def requests(self, relation: Relation) -> Sequence[RequirerCommonModel]: request_model = build_model(repository, RequirerDataContractV1[self.request_model]) return request_model.requests - def responses(self, relation: Relation, model: type[ResourceProviderModel]) -> list[ResourceProviderModel]: + def responses( + self, relation: Relation, model: 
type[ResourceProviderModel] + ) -> list[ResourceProviderModel]: """Returns the list of responses that we currently have.""" repository = self.interface.repository(relation.id, component=relation.app) @@ -2591,9 +2593,6 @@ def responses(self, relation: Relation, model: type[ResourceProviderModel]) -> l return self.interface.build_model(relation.id, DataContractV1[model]).requests - - - class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): """Event Handler for resource requirer."""