Add linters and formatting
Change-Id: Ib16bd1fac4b9dfc21b98c9ba1201305733d2598b
parent bc200fc470
commit 004e885760
@@ -8,7 +8,7 @@ needs to be applied to user supplied charm configuration. The context
 has access to the charm object.
 
 Below is an example which applies logic to the charm config as well as
-collecting the application name to constuct the context.
+collecting the application name to construct the context.
 
 .. code:: python
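The example itself falls outside this hunk; a minimal sketch of such a context,
assuming the `ops_sunbeam.config_contexts.ConfigContext` base class (the class
name and the `debug` option are illustrative):

.. code:: python

    import ops_sunbeam.config_contexts as sunbeam_ctxts


    class MyServiceConfigContext(sunbeam_ctxts.ConfigContext):
        """Configuration context built from charm config."""

        def context(self) -> dict:
            """Apply logic to charm config and collect the application name."""
            config = self.charm.model.config.get
            return {
                # Translate a boolean charm option into the value templates need.
                "log_level": "DEBUG" if config("debug") else "INFO",
                # Collect the application name to construct the context.
                "application_name": self.charm.app.name,
            }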
@@ -15,7 +15,7 @@ Common Pebble handler changes
 ASO provides pebble handler base classes which provide the starting point
 for writing a new handler. If the container runs a service then the
 `ServicePebbleHandler` should be used. If the container does not provide a
-service (perhaps its just an environment for executing commands that affact
+service (perhaps it's just an environment for executing commands that affect
 other containers) then `PebbleHandler` should be used.
 
 .. code:: python
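The handler body is outside this hunk; a hedged sketch of a minimal handler
subclass, assuming the `ServicePebbleHandler` base class and the
`ContainerConfigFile` tuple from `ops_sunbeam.core` (the service name and the
config file path are illustrative):

.. code:: python

    from typing import List

    import ops_sunbeam.container_handlers as sunbeam_chandlers
    import ops_sunbeam.core as sunbeam_core


    class MyServicePebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
        """Manage the my-service container."""

        def default_container_configs(
            self,
        ) -> List[sunbeam_core.ContainerConfigFile]:
            """Files to render into the container."""
            return [
                sunbeam_core.ContainerConfigFile(
                    '/etc/my-service/my-service.conf',
                    'root',
                    'root')]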
@@ -55,7 +55,7 @@ all available contexts.
                 'root',
                 'root')]
 
-If a service should be running in the conainer the handler specifies the
+If a service should be running in the container the handler specifies the
 layer describing the service that will be passed to pebble.
 
 .. code:: python
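Continuing the sketch above, a hedged example of the layer such a handler might
return (the command and service name are illustrative):

.. code:: python

    def get_layer(self) -> dict:
        """Pebble layer for the my-service service."""
        return {
            "summary": "my-service layer",
            "description": "pebble config layer for my-service",
            "services": {
                "my-service": {
                    "override": "replace",
                    "summary": "my-service",
                    "command": "/usr/bin/my-service",
                    "startup": "enabled",
                },
            },
        }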
@@ -51,7 +51,7 @@ an observer for relation changed events.
         self.framework.observe(db_relation_event, self._on_database_changed)
         return db
 
-The method run when tha changed event is seen checks whether all required data
+The method that runs when the changed event is seen checks whether all required data
 has been provided. If it is, it calls back to the charm; if not, no
 action is taken.
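A hedged sketch of such a method on the relation handler, assuming the `ready`
property and `callback_f` attribute that `RelationHandler` provides:

.. code:: python

    def _on_database_changed(self, event) -> None:
        """Handle database changed events."""
        if not self.ready:
            # Not all required data has been provided yet; take no action.
            return
        # All required data is present, call back to the charm.
        self.callback_f(event)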
@@ -107,7 +107,7 @@ Test Service
 ============
 
 Check that the juju status shows the charm is active and no error messages are
-preset. Then check the ironic api service is reponding.
+present. Then check the ironic api service is responding.
 
 .. code:: bash
@ -31,17 +31,26 @@ containers and managing the service running in the container.
|
||||
|
||||
import ipaddress
|
||||
import logging
|
||||
from typing import List, Mapping
|
||||
from typing import (
|
||||
List,
|
||||
Mapping,
|
||||
)
|
||||
|
||||
import charms.observability_libs.v0.kubernetes_service_patch as kube_svc_patch
|
||||
import ops.charm
|
||||
import ops.framework
|
||||
import ops.model
|
||||
import ops.pebble
|
||||
|
||||
from ops.model import ActiveStatus, MaintenanceStatus
|
||||
|
||||
from lightkube import Client
|
||||
from lightkube.resources.core_v1 import Service
|
||||
from lightkube import (
|
||||
Client,
|
||||
)
|
||||
from lightkube.resources.core_v1 import (
|
||||
Service,
|
||||
)
|
||||
from ops.model import (
|
||||
ActiveStatus,
|
||||
MaintenanceStatus,
|
||||
)
|
||||
|
||||
import ops_sunbeam.compound_status as compound_status
|
||||
import ops_sunbeam.config_contexts as sunbeam_config_contexts
|
||||
@ -49,8 +58,6 @@ import ops_sunbeam.container_handlers as sunbeam_chandlers
|
||||
import ops_sunbeam.core as sunbeam_core
|
||||
import ops_sunbeam.relation_handlers as sunbeam_rhandlers
|
||||
|
||||
import charms.observability_libs.v0.kubernetes_service_patch as kube_svc_patch
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -71,12 +78,13 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
self.status_pool.add(self.status)
|
||||
self._state.set_default(bootstrapped=False)
|
||||
self.bootstrap_status = compound_status.Status(
|
||||
"bootstrap",
|
||||
priority=90)
|
||||
"bootstrap", priority=90
|
||||
)
|
||||
self.status_pool.add(self.bootstrap_status)
|
||||
if not self.bootstrapped():
|
||||
self.bootstrap_status.set(MaintenanceStatus(
|
||||
"Service not bootstrapped"))
|
||||
self.bootstrap_status.set(
|
||||
MaintenanceStatus("Service not bootstrapped")
|
||||
)
|
||||
self.relation_handlers = self.get_relation_handlers()
|
||||
self.pebble_handlers = self.get_pebble_handlers()
|
||||
self.framework.observe(self.on.config_changed, self._on_config_changed)
|
||||
@ -145,9 +153,9 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
if self.can_add_handler("cloud-credentials", handlers):
|
||||
self.ccreds = sunbeam_rhandlers.CloudCredentialsRequiresHandler(
|
||||
self,
|
||||
'cloud-credentials',
|
||||
"cloud-credentials",
|
||||
self.configure_charm,
|
||||
'cloud-credentials' in self.mandatory_relations,
|
||||
"cloud-credentials" in self.mandatory_relations,
|
||||
)
|
||||
handlers.append(self.ccreds)
|
||||
return handlers
|
||||
@ -162,25 +170,26 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
ip_sans = []
|
||||
for relation_name in self.meta.relations.keys():
|
||||
for relation in self.framework.model.relations.get(
|
||||
relation_name, []):
|
||||
relation_name, []
|
||||
):
|
||||
binding = self.model.get_binding(relation)
|
||||
ip_sans.append(binding.network.ingress_address)
|
||||
ip_sans.append(binding.network.bind_address)
|
||||
|
||||
for binding_name in ['public']:
|
||||
for binding_name in ["public"]:
|
||||
try:
|
||||
binding = self.model.get_binding(binding_name)
|
||||
ip_sans.append(binding.network.ingress_address)
|
||||
ip_sans.append(binding.network.bind_address)
|
||||
except ops.model.ModelError:
|
||||
logging.debug(f'No binding found for {binding_name}')
|
||||
logging.debug(f"No binding found for {binding_name}")
|
||||
return ip_sans
|
||||
|
||||
def get_domain_name_sans(self) -> List[str]:
|
||||
"""Get Domain names for service."""
|
||||
domain_name_sans = []
|
||||
for binding_config in ['admin', 'internal', 'public']:
|
||||
hostname = self.config.get(f'os-{binding_config}-hostname')
|
||||
for binding_config in ["admin", "internal", "public"]:
|
||||
hostname = self.config.get(f"os-{binding_config}-hostname")
|
||||
if hostname:
|
||||
domain_name_sans.append(hostname)
|
||||
return domain_name_sans
|
||||
@ -200,8 +209,7 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
]
|
||||
|
||||
def get_named_pebble_handler(
|
||||
self,
|
||||
container_name: str
|
||||
self, container_name: str
|
||||
) -> sunbeam_chandlers.PebbleHandler:
|
||||
"""Get pebble handler matching container_name."""
|
||||
pebble_handlers = [
|
||||
@ -209,16 +217,16 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
for h in self.pebble_handlers
|
||||
if h.container_name == container_name
|
||||
]
|
||||
assert len(pebble_handlers) < 2, ("Multiple pebble handlers with the "
|
||||
"same name found.")
|
||||
assert len(pebble_handlers) < 2, (
|
||||
"Multiple pebble handlers with the " "same name found."
|
||||
)
|
||||
if pebble_handlers:
|
||||
return pebble_handlers[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_named_pebble_handlers(
|
||||
self,
|
||||
container_names: List[str]
|
||||
self, container_names: List[str]
|
||||
) -> List[sunbeam_chandlers.PebbleHandler]:
|
||||
"""Get pebble handlers matching container_names."""
|
||||
return [
|
||||
@ -227,6 +235,7 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
if h.container_name in container_names
|
||||
]
|
||||
|
||||
# flake8: noqa: C901
|
||||
def configure_charm(self, event: ops.framework.EventBase) -> None:
|
||||
"""Catchall handler to configure charm services."""
|
||||
if self.supports_peer_relation and not (
|
||||
@ -246,19 +255,22 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
else:
|
||||
logging.debug(
|
||||
f"Not running init for {ph.service_name},"
|
||||
" container not ready")
|
||||
" container not ready"
|
||||
)
|
||||
|
||||
for ph in self.pebble_handlers:
|
||||
if not ph.service_ready:
|
||||
logging.debug(
|
||||
f"Aborting container {ph.service_name} service not ready")
|
||||
f"Aborting container {ph.service_name} service not ready"
|
||||
)
|
||||
return
|
||||
|
||||
if not self.bootstrapped():
|
||||
if not self._do_bootstrap():
|
||||
self._state.bootstrapped = False
|
||||
logging.warning(
|
||||
"Failed to bootstrap the service, event deferred")
|
||||
"Failed to bootstrap the service, event deferred"
|
||||
)
|
||||
# Defer the event to re-trigger the bootstrap process
|
||||
event.defer()
|
||||
return
|
||||
@ -306,8 +318,7 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
|
||||
@property
|
||||
def databases(self) -> Mapping[str, str]:
|
||||
"""
|
||||
Return a mapping of database relation names to database names.
|
||||
"""Return a mapping of database relation names to database names.
|
||||
|
||||
Use this to define the databases required by an application.
|
||||
|
||||
@ -322,9 +333,7 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
It defaults to loading a relation named "database",
|
||||
with the database named after the service name.
|
||||
"""
|
||||
return {
|
||||
"database": self.service_name.replace("-", "_")
|
||||
}
|
||||
return {"database": self.service_name.replace("-", "_")}
|
||||
|
||||
def _on_config_changed(self, event: ops.framework.EventBase) -> None:
|
||||
self.configure_charm(event)
|
||||
@ -345,7 +354,8 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
if handler.mandatory and handler.ready
|
||||
}
|
||||
not_ready_relations = self.mandatory_relations.difference(
|
||||
ready_relations)
|
||||
ready_relations
|
||||
)
|
||||
|
||||
if len(not_ready_relations) != 0:
|
||||
logger.info(f"Relations {not_ready_relations} incomplete")
|
||||
@ -369,7 +379,7 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
return ra
|
||||
|
||||
def bootstrapped(self) -> bool:
|
||||
"""Determine whether the service has been boostrapped."""
|
||||
"""Determine whether the service has been bootstrapped."""
|
||||
return self._state.bootstrapped
|
||||
|
||||
def leader_set(self, settings: dict = None, **kwargs) -> None:
|
||||
@ -401,24 +411,26 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
:raises: pebble.ExecError
|
||||
"""
|
||||
if not self.unit.is_leader():
|
||||
logging.info('Not lead unit, skipping DB syncs')
|
||||
logging.info("Not lead unit, skipping DB syncs")
|
||||
return
|
||||
try:
|
||||
if self.db_sync_cmds:
|
||||
logger.info("Syncing database...")
|
||||
container = self.unit.get_container(
|
||||
self.db_sync_container_name)
|
||||
self.db_sync_container_name
|
||||
)
|
||||
for cmd in self.db_sync_cmds:
|
||||
logging.debug('Running sync: \n%s', cmd)
|
||||
process = container.exec(cmd, timeout=5*60)
|
||||
logging.debug("Running sync: \n%s", cmd)
|
||||
process = container.exec(cmd, timeout=5 * 60)
|
||||
out, warnings = process.wait_output()
|
||||
if warnings:
|
||||
for line in warnings.splitlines():
|
||||
logger.warning('DB Sync Out: %s', line.strip())
|
||||
logging.debug('Output from database sync: \n%s', out)
|
||||
logger.warning("DB Sync Out: %s", line.strip())
|
||||
logging.debug("Output from database sync: \n%s", out)
|
||||
except AttributeError:
|
||||
logger.warning(
|
||||
"Not DB sync ran. Charm does not specify self.db_sync_cmds")
|
||||
"Not DB sync ran. Charm does not specify self.db_sync_cmds"
|
||||
)
|
||||
|
||||
def _do_bootstrap(self) -> bool:
|
||||
"""Perform bootstrap.
|
||||
@ -431,21 +443,17 @@ class OSBaseOperatorCharm(ops.charm.CharmBase):
|
||||
self.bootstrap_status.set(ActiveStatus())
|
||||
return True
|
||||
except ops.pebble.ExecError as e:
|
||||
logger.exception('Failed to bootstrap')
|
||||
logger.error('Exited with code %d. Stderr:', e.exit_code)
|
||||
logger.exception("Failed to bootstrap")
|
||||
logger.error("Exited with code %d. Stderr:", e.exit_code)
|
||||
for line in e.stderr.splitlines():
|
||||
logger.error(' %s', line)
|
||||
logger.error(" %s", line)
|
||||
return False
|
||||
|
||||
|
||||
class OSBaseOperatorAPICharm(OSBaseOperatorCharm):
|
||||
"""Base class for OpenStack API operators."""
|
||||
|
||||
mandatory_relations = {
|
||||
'database',
|
||||
'identity-service',
|
||||
'ingress-public'
|
||||
}
|
||||
mandatory_relations = {"database", "identity-service", "ingress-public"}
|
||||
|
||||
def __init__(self, framework: ops.framework.Framework) -> None:
|
||||
"""Run constructor."""
|
||||
@ -507,11 +515,13 @@ class OSBaseOperatorAPICharm(OSBaseOperatorCharm):
|
||||
update the relevant endpoints with the identity service, and then
|
||||
call the configure_charm.
|
||||
"""
|
||||
logger.debug('Received an ingress_changed event')
|
||||
logger.debug("Received an ingress_changed event")
|
||||
try:
|
||||
if self.id_svc.update_service_endpoints:
|
||||
logger.debug('Updating service endpoints after ingress '
|
||||
'relation changed.')
|
||||
logger.debug(
|
||||
"Updating service endpoints after ingress "
|
||||
"relation changed."
|
||||
)
|
||||
self.id_svc.update_service_endpoints(self.service_endpoints)
|
||||
except AttributeError:
|
||||
pass
|
||||
@ -540,17 +550,20 @@ class OSBaseOperatorAPICharm(OSBaseOperatorCharm):
|
||||
if load_balancer_status:
|
||||
ingress_addresses = load_balancer_status.ingress
|
||||
if ingress_addresses:
|
||||
logger.debug('Found ingress addresses on loadbalancer '
|
||||
'status')
|
||||
logger.debug(
|
||||
"Found ingress addresses on loadbalancer " "status"
|
||||
)
|
||||
ingress_address = ingress_addresses[0]
|
||||
addr = ingress_address.hostname or ingress_address.ip
|
||||
if addr:
|
||||
logger.debug('Using ingress address from loadbalancer '
|
||||
f'as {addr}')
|
||||
logger.debug(
|
||||
"Using ingress address from loadbalancer "
|
||||
f"as {addr}"
|
||||
)
|
||||
return ingress_address.hostname or ingress_address.ip
|
||||
|
||||
hostname = self.model.get_binding(
|
||||
'identity-service'
|
||||
"identity-service"
|
||||
).network.ingress_address
|
||||
return hostname
|
||||
|
||||
@ -559,9 +572,11 @@ class OSBaseOperatorAPICharm(OSBaseOperatorCharm):
|
||||
"""Url for accessing the public endpoint for this service."""
|
||||
try:
|
||||
if self.ingress_public.url:
|
||||
logger.debug('Ingress-public relation found, returning '
|
||||
'ingress-public.url of: %s',
|
||||
self.ingress_public.url)
|
||||
logger.debug(
|
||||
"Ingress-public relation found, returning "
|
||||
"ingress-public.url of: %s",
|
||||
self.ingress_public.url,
|
||||
)
|
||||
return self.ingress_public.url
|
||||
except AttributeError:
|
||||
pass
|
||||
@ -581,9 +596,11 @@ class OSBaseOperatorAPICharm(OSBaseOperatorCharm):
|
||||
"""Url for accessing the internal endpoint for this service."""
|
||||
try:
|
||||
if self.ingress_internal.url:
|
||||
logger.debug('Ingress-internal relation found, returning '
|
||||
'ingress_internal.url of: %s',
|
||||
self.ingress_internal.url)
|
||||
logger.debug(
|
||||
"Ingress-internal relation found, returning "
|
||||
"ingress_internal.url of: %s",
|
||||
self.ingress_internal.url,
|
||||
)
|
||||
return self.ingress_internal.url
|
||||
except AttributeError:
|
||||
pass
|
||||
@ -669,4 +686,4 @@ class OSBaseOperatorAPICharm(OSBaseOperatorCharm):
|
||||
@property
|
||||
def healthcheck_http_url(self) -> str:
|
||||
"""Healthcheck HTTP URL for the service."""
|
||||
return f'http://localhost:{self.default_public_ingress_port}/'
|
||||
return f"http://localhost:{self.default_public_ingress_port}/"
|
||||
|
@ -1,5 +1,18 @@
|
||||
"""
|
||||
A mini library for tracking status messages.
|
||||
# Copyright 2022 Canonical Ltd.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""A mini library for tracking status messages.
|
||||
|
||||
We want this because keeping track of everything
|
||||
with a single unit.status is too difficult.
|
||||
@ -11,12 +24,31 @@ aspects of the application without clobbering other parts.
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
from typing import Callable, Dict, Tuple, Optional
|
||||
from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
Optional,
|
||||
Tuple,
|
||||
)
|
||||
|
||||
from ops.charm import CharmBase
|
||||
from ops.framework import Handle, Object, StoredStateData, CommitEvent
|
||||
from ops.model import ActiveStatus, StatusBase, UnknownStatus, WaitingStatus
|
||||
from ops.storage import NoSnapshotError
|
||||
from ops.charm import (
|
||||
CharmBase,
|
||||
)
|
||||
from ops.framework import (
|
||||
CommitEvent,
|
||||
Handle,
|
||||
Object,
|
||||
StoredStateData,
|
||||
)
|
||||
from ops.model import (
|
||||
ActiveStatus,
|
||||
StatusBase,
|
||||
UnknownStatus,
|
||||
WaitingStatus,
|
||||
)
|
||||
from ops.storage import (
|
||||
NoSnapshotError,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -30,8 +62,7 @@ STATUS_PRIORITIES = {
|
||||
|
||||
|
||||
class Status:
|
||||
"""
|
||||
An atomic status.
|
||||
"""An atomic status.
|
||||
|
||||
A wrapper around a StatusBase from ops,
|
||||
that adds a priority, label,
|
||||
@ -39,8 +70,7 @@ class Status:
|
||||
"""
|
||||
|
||||
def __init__(self, label: str, priority: int = 0) -> None:
|
||||
"""
|
||||
Create a new Status object.
|
||||
"""Create a new Status object.
|
||||
|
||||
label: string label
|
||||
priority: integer, higher number is higher priority, default is 0
|
||||
@ -59,8 +89,7 @@ class Status:
|
||||
self.on_update: Optional[Callable[[], None]] = None
|
||||
|
||||
def set(self, status: StatusBase) -> None:
|
||||
"""
|
||||
Set the status.
|
||||
"""Set the status.
|
||||
|
||||
Will also run the on_update hook if available
|
||||
(should be set by the pool so the pool knows when it should update).
|
||||
@ -71,8 +100,7 @@ class Status:
|
||||
self.on_update()
|
||||
|
||||
def message(self) -> str:
|
||||
"""
|
||||
Get the status message consistently.
|
||||
"""Get the status message consistently.
|
||||
|
||||
Useful because UnknownStatus has no message attribute.
|
||||
"""
|
||||
@ -81,8 +109,7 @@ class Status:
|
||||
return self.status.message
|
||||
|
||||
def priority(self) -> Tuple[int, int]:
|
||||
"""
|
||||
Return a value to use for sorting statuses by priority.
|
||||
"""Return a value to use for sorting statuses by priority.
|
||||
|
||||
Used by the pool to retrieve the highest priority status
|
||||
to display to the user.
|
||||
@ -98,16 +125,14 @@ class Status:
|
||||
|
||||
|
||||
class StatusPool(Object):
|
||||
"""
|
||||
A pool of Status objects.
|
||||
"""A pool of Status objects.
|
||||
|
||||
This is implemented as an `Object`,
|
||||
so we can more simply save state between hook executions.
|
||||
"""
|
||||
|
||||
def __init__(self, charm: CharmBase) -> None:
|
||||
"""
|
||||
Init the status pool and restore from stored state if available.
|
||||
"""Init the status pool and restore from stored state if available.
|
||||
|
||||
Note that instantiating more than one StatusPool here is not supported,
|
||||
due to hardcoded framework stored data IDs.
|
||||
@ -143,15 +168,14 @@ class StatusPool(Object):
|
||||
charm.framework.observe(charm.framework.on.commit, self._on_commit)
|
||||
|
||||
def add(self, status: Status) -> None:
|
||||
"""
|
||||
Idempotently add a status object to the pool.
|
||||
"""Idempotently add a status object to the pool.
|
||||
|
||||
Reconstitute from saved state if it's a new status.
|
||||
"""
|
||||
if (
|
||||
status.never_set and
|
||||
status.label in self._status_state and
|
||||
status.label not in self._pool
|
||||
status.never_set
|
||||
and status.label in self._status_state
|
||||
and status.label not in self._pool
|
||||
):
|
||||
# If this status hasn't been seen or set yet,
|
||||
# and we have saved state for it,
|
||||
@ -168,24 +192,24 @@ class StatusPool(Object):
|
||||
self.on_update()
|
||||
|
||||
def summarise(self) -> str:
|
||||
"""
|
||||
Return a human readable summary of all the statuses in the pool.
|
||||
"""Return a human readable summary of all the statuses in the pool.
|
||||
|
||||
Will be a multi-line string.
|
||||
"""
|
||||
lines = []
|
||||
for status in sorted(self._pool.values(), key=lambda x: x.priority()):
|
||||
lines.append("{label:>30}: {status:>10} | {message}".format(
|
||||
label=status.label,
|
||||
message=status.message(),
|
||||
status=status.status.name,
|
||||
))
|
||||
lines.append(
|
||||
"{label:>30}: {status:>10} | {message}".format(
|
||||
label=status.label,
|
||||
message=status.message(),
|
||||
status=status.status.name,
|
||||
)
|
||||
)
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _on_commit(self, _event: CommitEvent) -> None:
|
||||
"""
|
||||
Store the current state of statuses.
|
||||
"""Store the current state of statuses.
|
||||
|
||||
So we can restore them on the next run of the charm.
|
||||
"""
|
||||
@ -199,8 +223,7 @@ class StatusPool(Object):
|
||||
self._charm.framework._storage.commit()
|
||||
|
||||
def on_update(self) -> None:
|
||||
"""
|
||||
Update the unit status with the current highest priority status.
|
||||
"""Update the unit status with the current highest priority status.
|
||||
|
||||
Use as a hook to run whenever a status is updated in the pool.
|
||||
"""
|
||||
@ -223,5 +246,5 @@ class StatusPool(Object):
|
||||
"({}){}".format(
|
||||
status.label,
|
||||
" " + message if message else "",
|
||||
)
|
||||
),
|
||||
)
|
||||
|
@ -19,9 +19,14 @@ create reusable contexts which translate charm config, deployment state etc.
|
||||
These are not specific to a relation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from __future__ import (
|
||||
annotations,
|
||||
)
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import ops_sunbeam.charm
|
||||
@@ -30,7 +35,7 @@ logger = logging.getLogger(__name__)
 
 # XXX Duplicating definition in relation handlers
 ERASURE_CODED = "erasure-coded"
-REPLICATED = "replacated"
+REPLICATED = "replicated"
 
 
 class ConfigContext:
@ -71,7 +76,7 @@ class WSGIWorkerConfigContext(ConfigContext):
|
||||
|
||||
def context(self) -> dict:
|
||||
"""WSGI configuration options."""
|
||||
log_svc_name = self.charm.service_name.replace('-', '_')
|
||||
log_svc_name = self.charm.service_name.replace("-", "_")
|
||||
return {
|
||||
"name": self.charm.service_name,
|
||||
"public_port": self.charm.default_public_ingress_port,
|
||||
@ -105,21 +110,20 @@ class CinderCephConfigurationContext(ConfigContext):
|
||||
def context(self) -> None:
|
||||
"""Cinder Ceph configuration context."""
|
||||
config = self.charm.model.config.get
|
||||
data_pool_name = config('rbd-pool-name') or self.charm.app.name
|
||||
if config('pool-type') == ERASURE_CODED:
|
||||
data_pool_name = config("rbd-pool-name") or self.charm.app.name
|
||||
if config("pool-type") == ERASURE_CODED:
|
||||
pool_name = (
|
||||
config('ec-rbd-metadata-pool') or
|
||||
f"{data_pool_name}-metadata"
|
||||
config("ec-rbd-metadata-pool") or f"{data_pool_name}-metadata"
|
||||
)
|
||||
else:
|
||||
pool_name = data_pool_name
|
||||
backend_name = config('volume-backend-name') or self.charm.app.name
|
||||
backend_name = config("volume-backend-name") or self.charm.app.name
|
||||
# TODO:
|
||||
# secret_uuid needs to be generated and shared for the app
|
||||
return {
|
||||
'cluster_name': self.charm.app.name,
|
||||
'rbd_pool': pool_name,
|
||||
'rbd_user': self.charm.app.name,
|
||||
'backend_name': backend_name,
|
||||
'backend_availability_zone': config('backend-availability-zone'),
|
||||
"cluster_name": self.charm.app.name,
|
||||
"rbd_pool": pool_name,
|
||||
"rbd_user": self.charm.app.name,
|
||||
"backend_name": backend_name,
|
||||
"backend_availability_zone": config("backend-availability-zone"),
|
||||
}
|
||||
|
@ -21,17 +21,25 @@ in the container.
|
||||
|
||||
import collections
|
||||
import logging
|
||||
from collections.abc import (
|
||||
Callable,
|
||||
)
|
||||
from typing import (
|
||||
List,
|
||||
TypedDict,
|
||||
)
|
||||
|
||||
import ops.charm
|
||||
import ops.pebble
|
||||
from ops.model import (
|
||||
ActiveStatus,
|
||||
BlockedStatus,
|
||||
WaitingStatus,
|
||||
)
|
||||
|
||||
import ops_sunbeam.compound_status as compound_status
|
||||
import ops_sunbeam.core as sunbeam_core
|
||||
import ops_sunbeam.templating as sunbeam_templating
|
||||
import ops.charm
|
||||
import ops.pebble
|
||||
|
||||
from ops.model import ActiveStatus, WaitingStatus, BlockedStatus
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import List, TypedDict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -73,8 +81,7 @@ class PebbleHandler(ops.charm.Object):
|
||||
self.charm.status_pool.add(self.status)
|
||||
|
||||
self.framework.observe(
|
||||
self.charm.on.update_status,
|
||||
self._on_update_status
|
||||
self.charm.on.update_status, self._on_update_status
|
||||
)
|
||||
|
||||
def setup_pebble_handler(self) -> None:
|
||||
@ -150,10 +157,8 @@ class PebbleHandler(ops.charm.Object):
|
||||
for d in self.directories:
|
||||
logging.debug(f"Creating {d.path}")
|
||||
container.make_dir(
|
||||
d.path,
|
||||
user=d.user,
|
||||
group=d.group,
|
||||
make_parents=True)
|
||||
d.path, user=d.user, group=d.group, make_parents=True
|
||||
)
|
||||
|
||||
def init_service(self, context: sunbeam_core.OPSCharmContexts) -> None:
|
||||
"""Initialise service ready for use.
|
||||
@ -179,9 +184,7 @@ class PebbleHandler(ops.charm.Object):
|
||||
@property
|
||||
def pebble_ready(self) -> bool:
|
||||
"""Determine if pebble is running and ready for use."""
|
||||
return self.charm.unit.get_container(
|
||||
self.container_name
|
||||
).can_connect()
|
||||
return self.charm.unit.get_container(self.container_name).can_connect()
|
||||
|
||||
@property
|
||||
def config_pushed(self) -> bool:
|
||||
@ -193,8 +196,9 @@ class PebbleHandler(ops.charm.Object):
|
||||
"""Determine whether the service the container provides is running."""
|
||||
return self._state.service_ready
|
||||
|
||||
def execute(self, cmd: List, exception_on_error: bool = False,
|
||||
**kwargs: TypedDict) -> str:
|
||||
def execute(
|
||||
self, cmd: List, exception_on_error: bool = False, **kwargs: TypedDict
|
||||
) -> str:
|
||||
"""Execute given command in container managed by this handler.
|
||||
|
||||
:param cmd: command to execute, specified as a list of strings
|
||||
@ -211,15 +215,15 @@ class PebbleHandler(ops.charm.Object):
|
||||
stdout, _ = process.wait_output()
|
||||
# Not logging the command in case it included a password,
|
||||
# too cautious ?
|
||||
logger.debug('Command complete')
|
||||
logger.debug("Command complete")
|
||||
if stdout:
|
||||
for line in stdout.splitlines():
|
||||
logger.debug(' %s', line)
|
||||
logger.debug(" %s", line)
|
||||
return stdout
|
||||
except ops.pebble.ExecError as e:
|
||||
logger.error('Exited with code %d. Stderr:', e.exit_code)
|
||||
logger.error("Exited with code %d. Stderr:", e.exit_code)
|
||||
for line in e.stderr.splitlines():
|
||||
logger.error(' %s', line)
|
||||
logger.error(" %s", line)
|
||||
if exception_on_error:
|
||||
raise
|
||||
|
||||
@ -236,7 +240,8 @@ class PebbleHandler(ops.charm.Object):
|
||||
if not plan.checks:
|
||||
logger.debug("Adding healthcheck layer to the plan")
|
||||
container.add_layer(
|
||||
"healthchecks", healthcheck_layer, combine=True)
|
||||
"healthchecks", healthcheck_layer, combine=True
|
||||
)
|
||||
except ops.pebble.ConnectionError as connect_error:
|
||||
logger.error("Not able to add Healthcheck layer")
|
||||
logger.exception(connect_error)
|
||||
@ -263,17 +268,19 @@ class PebbleHandler(ops.charm.Object):
|
||||
|
||||
# Verify alive checks if ready checks are missing
|
||||
if not checks:
|
||||
checks = container.get_checks(
|
||||
level=ops.pebble.CheckLevel.ALIVE)
|
||||
checks = container.get_checks(level=ops.pebble.CheckLevel.ALIVE)
|
||||
for name, check in checks.items():
|
||||
if check.status != ops.pebble.CheckStatus.UP:
|
||||
failed.append(name)
|
||||
|
||||
if failed:
|
||||
self.status.set(BlockedStatus('healthcheck{} failed: {}'.format(
|
||||
's' if len(failed) > 1 else '',
|
||||
', '.join(failed)
|
||||
)))
|
||||
self.status.set(
|
||||
BlockedStatus(
|
||||
"healthcheck{} failed: {}".format(
|
||||
"s" if len(failed) > 1 else "", ", ".join(failed)
|
||||
)
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
self.status.set(ActiveStatus(""))
|
||||
@ -288,9 +295,10 @@ class PebbleHandler(ops.charm.Object):
|
||||
for service_name, service in services.items():
|
||||
if service.is_running() and restart:
|
||||
logger.debug(
|
||||
f'Stopping {service_name} in {self.container_name}')
|
||||
f"Stopping {service_name} in {self.container_name}"
|
||||
)
|
||||
container.stop(service_name)
|
||||
logger.debug(f'Starting {service_name} in {self.container_name}')
|
||||
logger.debug(f"Starting {service_name} in {self.container_name}")
|
||||
container.start(service_name)
|
||||
|
||||
|
||||
@ -318,14 +326,15 @@ class ServicePebbleHandler(PebbleHandler):
|
||||
"""
|
||||
container = self.charm.unit.get_container(self.container_name)
|
||||
if not container:
|
||||
logger.debug(f'{self.container_name} container is not ready. '
|
||||
'Cannot start service.')
|
||||
logger.debug(
|
||||
f"{self.container_name} container is not ready. "
|
||||
"Cannot start service."
|
||||
)
|
||||
return
|
||||
if self.service_name not in container.get_services().keys():
|
||||
container.add_layer(
|
||||
self.service_name,
|
||||
self.get_layer(),
|
||||
combine=True)
|
||||
self.service_name, self.get_layer(), combine=True
|
||||
)
|
||||
self._start_all(restart=restart)
|
||||
|
||||
|
||||
@ -369,9 +378,8 @@ class WSGIPebbleHandler(PebbleHandler):
|
||||
return
|
||||
if self.wsgi_service_name not in container.get_services().keys():
|
||||
container.add_layer(
|
||||
self.service_name,
|
||||
self.get_layer(),
|
||||
combine=True)
|
||||
self.service_name, self.get_layer(), combine=True
|
||||
)
|
||||
self._start_all(restart=restart)
|
||||
|
||||
def start_service(self) -> None:
|
||||
@ -409,16 +417,12 @@ class WSGIPebbleHandler(PebbleHandler):
|
||||
"period": "10s",
|
||||
"timeout": "3s",
|
||||
"threshold": 3,
|
||||
"exec": {
|
||||
"command": "service apache2 status"
|
||||
}
|
||||
"exec": {"command": "service apache2 status"},
|
||||
},
|
||||
"online": {
|
||||
"override": "replace",
|
||||
"level": "ready",
|
||||
"http": {
|
||||
"url": self.charm.healthcheck_http_url
|
||||
}
|
||||
"http": {"url": self.charm.healthcheck_http_url},
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -429,13 +433,13 @@ class WSGIPebbleHandler(PebbleHandler):
|
||||
files_changed = self.write_config(context)
|
||||
try:
|
||||
process = container.exec(
|
||||
['a2ensite', self.wsgi_service_name],
|
||||
timeout=5*60)
|
||||
["a2ensite", self.wsgi_service_name], timeout=5 * 60
|
||||
)
|
||||
out, warnings = process.wait_output()
|
||||
if warnings:
|
||||
for line in warnings.splitlines():
|
||||
logger.warning('a2ensite warn: %s', line.strip())
|
||||
logging.debug(f'Output from a2ensite: \n{out}')
|
||||
logger.warning("a2ensite warn: %s", line.strip())
|
||||
logging.debug(f"Output from a2ensite: \n{out}")
|
||||
except ops.pebble.ExecError:
|
||||
logger.exception(
|
||||
f"Failed to enable {self.wsgi_service_name} site in apache"
|
||||
@ -459,7 +463,5 @@ class WSGIPebbleHandler(PebbleHandler):
|
||||
) -> List[sunbeam_core.ContainerConfigFile]:
|
||||
"""Container configs for WSGI service."""
|
||||
return [
|
||||
sunbeam_core.ContainerConfigFile(
|
||||
self.wsgi_conf, "root", "root"
|
||||
)
|
||||
sunbeam_core.ContainerConfigFile(self.wsgi_conf, "root", "root")
|
||||
]
|
||||
|
@ -15,17 +15,29 @@
|
||||
"""Collection of core components."""
|
||||
|
||||
import collections
|
||||
from typing import Generator, List, TYPE_CHECKING, Tuple, Union
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Generator,
|
||||
List,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ops_sunbeam.charm import OSBaseOperatorCharm
|
||||
from ops_sunbeam.config_contexts import ConfigContext
|
||||
from ops_sunbeam.relation_handlers import RelationHandler
|
||||
from ops_sunbeam.charm import (
|
||||
OSBaseOperatorCharm,
|
||||
)
|
||||
from ops_sunbeam.config_contexts import (
|
||||
ConfigContext,
|
||||
)
|
||||
from ops_sunbeam.relation_handlers import (
|
||||
RelationHandler,
|
||||
)
|
||||
|
||||
ContainerConfigFile = collections.namedtuple(
|
||||
"ContainerConfigFile",
|
||||
["path", "user", "group", "permissions"],
|
||||
defaults=(None,)
|
||||
defaults=(None,),
|
||||
)
|
||||
|
||||
|
||||
@ -61,7 +73,7 @@ class OPSCharmContexts:
|
||||
def add_config_context(
|
||||
self, config_adapter: "ConfigContext", namespace: str
|
||||
) -> None:
|
||||
"""Add add config adapater to context."""
|
||||
"""Add add config adapter to context."""
|
||||
self.namespaces.append(namespace)
|
||||
setattr(self, namespace, config_adapter)
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
# Copyright 2021, Canonical Ltd.
|
||||
# Copyright 2021 Canonical Ltd.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -15,21 +15,27 @@
|
||||
"""Module to handle errors and bailing out of an event/hook."""
|
||||
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
from contextlib import (
|
||||
contextmanager,
|
||||
)
|
||||
|
||||
from ops.charm import CharmBase
|
||||
from ops.model import BlockedStatus
|
||||
from ops.charm import (
|
||||
CharmBase,
|
||||
)
|
||||
from ops.model import (
|
||||
BlockedStatus,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GuardException(Exception):
|
||||
class GuardExceptionError(Exception):
|
||||
"""GuardException."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class BlockedException(Exception):
|
||||
class BlockedExceptionError(Exception):
|
||||
"""Charm is blocked."""
|
||||
|
||||
pass
|
||||
@ -41,7 +47,7 @@ def guard(
|
||||
section: str,
|
||||
handle_exception: bool = True,
|
||||
log_traceback: bool = True,
|
||||
**__
|
||||
**__,
|
||||
) -> None:
|
||||
"""Context manager to handle errors and bailing out of an event/hook.
|
||||
|
||||
@ -62,13 +68,13 @@ def guard(
|
||||
try:
|
||||
yield
|
||||
logging.info("Completed guarded section fully: '%s'", section)
|
||||
except GuardException as e:
|
||||
except GuardExceptionError as e:
|
||||
logger.info(
|
||||
"Guarded Section: Early exit from '%s' due to '%s'.",
|
||||
section,
|
||||
str(e),
|
||||
)
|
||||
except BlockedException as e:
|
||||
except BlockedExceptionError as e:
|
||||
logger.warning(
|
||||
"Charm is blocked in section '%s' due to '%s'", section, str(e)
|
||||
)
|
||||
@ -77,14 +83,16 @@ def guard(
|
||||
# something else went wrong
|
||||
if handle_exception:
|
||||
logging.error(
|
||||
"Exception raised in secion '%s': %s", section, str(e)
|
||||
"Exception raised in section '%s': %s", section, str(e)
|
||||
)
|
||||
if log_traceback:
|
||||
import traceback
|
||||
|
||||
logging.error(traceback.format_exc())
|
||||
charm.status.set(BlockedStatus(
|
||||
"Error in charm (see logs): {}".format(str(e))
|
||||
))
|
||||
charm.status.set(
|
||||
BlockedStatus(
|
||||
"Error in charm (see logs): {}".format(str(e))
|
||||
)
|
||||
)
|
||||
return
|
||||
raise
|
||||
|
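# Illustrative usage sketch of the guard context manager above; the section
# label and the self.relation_handlers_ready / self.configure_charm names are
# assumed from elsewhere in this library and may differ in a real charm.
import ops_sunbeam.guard as sunbeam_guard


def _on_config_changed(self, event) -> None:
    """Example event handler wrapped in a guarded section."""
    with sunbeam_guard.guard(self, "configuring charm"):
        if not self.relation_handlers_ready():
            # Bail out of the hook early without marking the charm blocked.
            raise sunbeam_guard.GuardExceptionError("mandatory relations not ready")
        self.configure_charm(event)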
@ -15,15 +15,20 @@
|
||||
"""Common interfaces not charm specific."""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Optional
|
||||
from typing import (
|
||||
Dict,
|
||||
List,
|
||||
Optional,
|
||||
)
|
||||
|
||||
import ops.model
|
||||
|
||||
from ops.framework import EventBase
|
||||
from ops.framework import EventSource
|
||||
from ops.framework import Object
|
||||
from ops.framework import ObjectEvents
|
||||
from ops.framework import StoredState
|
||||
from ops.framework import (
|
||||
EventBase,
|
||||
EventSource,
|
||||
Object,
|
||||
ObjectEvents,
|
||||
StoredState,
|
||||
)
|
||||
|
||||
|
||||
class PeersRelationCreatedEvent(EventBase):
|
||||
|
@ -14,11 +14,13 @@
|
||||
|
||||
"""Base classes for defining an OVN charm using the Operator framework."""
|
||||
|
||||
from typing import List
|
||||
from typing import (
|
||||
List,
|
||||
)
|
||||
|
||||
from . import relation_handlers as ovn_relation_handlers
|
||||
from .. import relation_handlers as sunbeam_rhandlers
|
||||
from .. import charm as sunbeam_charm
|
||||
from .. import relation_handlers as sunbeam_rhandlers
|
||||
from . import relation_handlers as ovn_relation_handlers
|
||||
|
||||
|
||||
class OSBaseOVNOperatorCharm(sunbeam_charm.OSBaseOperatorCharm):
|
||||
|
@ -28,7 +28,8 @@ class OVNDBConfigContext(sunbeam_ccontexts.ConfigContext):
|
||||
def context(self) -> dict:
|
||||
"""Context for OVN certs and leadership."""
|
||||
return {
|
||||
'is_charm_leader': self.charm.unit.is_leader(),
|
||||
'ovn_key': '/etc/ovn/key_host',
|
||||
'ovn_cert': '/etc/ovn/cert_host',
|
||||
'ovn_ca_cert': '/etc/ovn/ovn-central.crt'}
|
||||
"is_charm_leader": self.charm.unit.is_leader(),
|
||||
"ovn_key": "/etc/ovn/key_host",
|
||||
"ovn_cert": "/etc/ovn/cert_host",
|
||||
"ovn_ca_cert": "/etc/ovn/ovn-central.crt",
|
||||
}
|
||||
|
@ -14,11 +14,13 @@
|
||||
|
||||
"""Base classes for defining OVN Pebble handlers."""
|
||||
|
||||
from typing import (
|
||||
List,
|
||||
)
|
||||
|
||||
from .. import container_handlers as sunbeam_chandlers
|
||||
from .. import core as sunbeam_core
|
||||
|
||||
from typing import List
|
||||
|
||||
|
||||
class OVNPebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
|
||||
"""Common class for OVN services."""
|
||||
@ -47,7 +49,7 @@ class OVNPebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
|
||||
|
||||
@property
|
||||
def service_description(self) -> str:
|
||||
"""Return a short decription of service e.g. OVN Southbound DB."""
|
||||
"""Return a short description of service e.g. OVN Southbound DB."""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_layer(self) -> dict:
|
||||
@ -58,8 +60,9 @@ class OVNPebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
|
||||
"""
|
||||
return {
|
||||
"summary": f"{self.service_description} service",
|
||||
"description": ("Pebble config layer for "
|
||||
f"{self.service_description}"),
|
||||
"description": (
|
||||
"Pebble config layer for " f"{self.service_description}"
|
||||
),
|
||||
"services": {
|
||||
self.service_name: {
|
||||
"override": "replace",
|
||||
@ -81,9 +84,7 @@ class OVNPebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
|
||||
"online": {
|
||||
"override": "replace",
|
||||
"level": "ready",
|
||||
"exec": {
|
||||
"command": f"{self.status_command}"
|
||||
}
|
||||
"exec": {"command": f"{self.status_command}"},
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -92,41 +93,27 @@ class OVNPebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
|
||||
def directories(self) -> List[sunbeam_chandlers.ContainerDir]:
|
||||
"""Directories to creete in container."""
|
||||
return [
|
||||
sunbeam_chandlers.ContainerDir(
|
||||
'/etc/ovn',
|
||||
'root',
|
||||
'root'),
|
||||
sunbeam_chandlers.ContainerDir(
|
||||
'/run/ovn',
|
||||
'root',
|
||||
'root'),
|
||||
sunbeam_chandlers.ContainerDir(
|
||||
'/var/lib/ovn',
|
||||
'root',
|
||||
'root'),
|
||||
sunbeam_chandlers.ContainerDir(
|
||||
'/var/log/ovn',
|
||||
'root',
|
||||
'root')]
|
||||
sunbeam_chandlers.ContainerDir("/etc/ovn", "root", "root"),
|
||||
sunbeam_chandlers.ContainerDir("/run/ovn", "root", "root"),
|
||||
sunbeam_chandlers.ContainerDir("/var/lib/ovn", "root", "root"),
|
||||
sunbeam_chandlers.ContainerDir("/var/log/ovn", "root", "root"),
|
||||
]
|
||||
|
||||
def default_container_configs(
|
||||
self
|
||||
self,
|
||||
) -> List[sunbeam_core.ContainerConfigFile]:
|
||||
"""Files to render into containers."""
|
||||
return [
|
||||
sunbeam_core.ContainerConfigFile(
|
||||
self.wrapper_script,
|
||||
'root',
|
||||
'root'),
|
||||
self.wrapper_script, "root", "root"
|
||||
),
|
||||
sunbeam_core.ContainerConfigFile(
|
||||
'/etc/ovn/key_host',
|
||||
'root',
|
||||
'root'),
|
||||
"/etc/ovn/key_host", "root", "root"
|
||||
),
|
||||
sunbeam_core.ContainerConfigFile(
|
||||
'/etc/ovn/cert_host',
|
||||
'root',
|
||||
'root'),
|
||||
"/etc/ovn/cert_host", "root", "root"
|
||||
),
|
||||
sunbeam_core.ContainerConfigFile(
|
||||
'/etc/ovn/ovn-central.crt',
|
||||
'root',
|
||||
'root')]
|
||||
"/etc/ovn/ovn-central.crt", "root", "root"
|
||||
),
|
||||
]
|
||||
|
@ -16,10 +16,14 @@
|
||||
|
||||
import ipaddress
|
||||
import itertools
|
||||
import socket
|
||||
import logging
|
||||
|
||||
from typing import Callable, Dict, Iterator, List
|
||||
import socket
|
||||
from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
Iterator,
|
||||
List,
|
||||
)
|
||||
|
||||
import ops.charm
|
||||
import ops.framework
|
||||
@ -29,7 +33,7 @@ from .. import relation_handlers as sunbeam_rhandlers
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OVNRelationUtils():
|
||||
class OVNRelationUtils:
|
||||
"""Common utilities for processing OVN relations."""
|
||||
|
||||
DB_NB_PORT = 6641
|
||||
@ -49,9 +53,9 @@ class OVNRelationUtils():
|
||||
"""
|
||||
ipaddr = ipaddress.ip_address(addr)
|
||||
if isinstance(ipaddr, ipaddress.IPv6Address):
|
||||
fmt = '[{}]'
|
||||
fmt = "[{}]"
|
||||
else:
|
||||
fmt = '{}'
|
||||
fmt = "{}"
|
||||
return fmt.format(ipaddr)
|
||||
|
||||
def _remote_addrs(self, key: str) -> Iterator[str]:
|
||||
@ -87,7 +91,7 @@ class OVNRelationUtils():
|
||||
:returns: hostnames bound to remote endpoints.
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return self._remote_hostnames('bound-hostname')
|
||||
return self._remote_hostnames("bound-hostname")
|
||||
|
||||
@property
|
||||
def cluster_remote_addrs(self) -> Iterator[str]:
|
||||
@ -96,13 +100,11 @@ class OVNRelationUtils():
|
||||
:returns: addresses bound to remote endpoints.
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return self._remote_addrs('bound-address')
|
||||
return self._remote_addrs("bound-address")
|
||||
|
||||
def db_connection_strs(
|
||||
self,
|
||||
hostnames: List[str],
|
||||
port: int,
|
||||
proto: str = 'ssl') -> Iterator[str]:
|
||||
self, hostnames: List[str], port: int, proto: str = "ssl"
|
||||
) -> Iterator[str]:
|
||||
"""Provide connection strings.
|
||||
|
||||
:param hostnames: List of hostnames to include in conn strs
|
||||
@ -115,7 +117,7 @@ class OVNRelationUtils():
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
for hostname in hostnames:
|
||||
yield ':'.join((proto, str(hostname), str(port)))
|
||||
yield ":".join((proto, str(hostname), str(port)))
|
||||
|
||||
@property
|
||||
def db_nb_port(self) -> int:
|
||||
@ -173,8 +175,9 @@ class OVNRelationUtils():
|
||||
:returns: OVN Northbound OVSDB connection strings.
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return self.db_connection_strs(self.cluster_remote_addrs,
|
||||
self.db_nb_port)
|
||||
return self.db_connection_strs(
|
||||
self.cluster_remote_addrs, self.db_nb_port
|
||||
)
|
||||
|
||||
@property
|
||||
def db_sb_connection_strs(self) -> Iterator[str]:
|
||||
@ -183,8 +186,9 @@ class OVNRelationUtils():
|
||||
:returns: OVN Southbound OVSDB connection strings.
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return self.db_connection_strs(self.cluster_remote_addrs,
|
||||
self.db_sb_port)
|
||||
return self.db_connection_strs(
|
||||
self.cluster_remote_addrs, self.db_sb_port
|
||||
)
|
||||
|
||||
@property
|
||||
def db_nb_connection_hostname_strs(self) -> Iterator[str]:
|
||||
@ -193,8 +197,9 @@ class OVNRelationUtils():
|
||||
:returns: OVN Northbound OVSDB connection strings.
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return self.db_connection_strs(self.cluster_remote_hostnames,
|
||||
self.db_nb_port)
|
||||
return self.db_connection_strs(
|
||||
self.cluster_remote_hostnames, self.db_nb_port
|
||||
)
|
||||
|
||||
@property
|
||||
def db_sb_connection_hostname_strs(self) -> Iterator[str]:
|
||||
@ -203,8 +208,9 @@ class OVNRelationUtils():
|
||||
:returns: OVN Southbound OVSDB connection strings.
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return self.db_connection_strs(self.cluster_remote_hostnames,
|
||||
self.db_sb_port)
|
||||
return self.db_connection_strs(
|
||||
self.cluster_remote_hostnames, self.db_sb_port
|
||||
)
|
||||
|
||||
@property
|
||||
def cluster_local_addr(self) -> ipaddress.IPv4Address:
|
||||
@ -237,13 +243,12 @@ class OVNRelationUtils():
|
||||
return addr
|
||||
|
||||
|
||||
class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
OVNRelationUtils):
|
||||
class OVNDBClusterPeerHandler(
|
||||
sunbeam_rhandlers.BasePeerHandler, OVNRelationUtils
|
||||
):
|
||||
"""Handle OVN peer relation."""
|
||||
|
||||
def publish_cluster_local_hostname(
|
||||
self,
|
||||
hostname: str = None) -> Dict:
|
||||
def publish_cluster_local_hostname(self, hostname: str = None) -> Dict:
|
||||
"""Announce hostname on relation.
|
||||
|
||||
This will be used by our peers and clients to build a connection
|
||||
@ -254,7 +259,7 @@ class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
"""
|
||||
_hostname = hostname or self.cluster_local_hostname
|
||||
if _hostname:
|
||||
self.interface.set_unit_data({'bound-hostname': str(_hostname)})
|
||||
self.interface.set_unit_data({"bound-hostname": str(_hostname)})
|
||||
|
||||
def expected_peers_available(self) -> bool:
|
||||
"""Whether expected peers have joined and published data on peer rel.
|
||||
@ -272,17 +277,20 @@ class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
if len(joined_units) < expected_remote_units:
|
||||
logging.debug(
|
||||
f"Expected {expected_remote_units} but only {joined_units} "
|
||||
"have joined so far")
|
||||
"have joined so far"
|
||||
)
|
||||
return False
|
||||
hostnames = self.interface.get_all_unit_values('bound-hostname')
|
||||
hostnames = self.interface.get_all_unit_values("bound-hostname")
|
||||
if len(hostnames) < expected_remote_units:
|
||||
logging.debug(
|
||||
"Not all units have published a bound-hostname. Current "
|
||||
f"hostname list: {hostnames}")
|
||||
f"hostname list: {hostnames}"
|
||||
)
|
||||
return False
|
||||
else:
|
||||
logging.debug(
|
||||
f"All expected peers are present. Hostnames: {hostnames}")
|
||||
f"All expected peers are present. Hostnames: {hostnames}"
|
||||
)
|
||||
return True
|
||||
|
||||
@property
|
||||
@ -296,10 +304,13 @@ class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return itertools.chain(
|
||||
self.db_connection_strs((self.cluster_local_hostname,),
|
||||
self.db_nb_port),
|
||||
self.db_connection_strs(self.cluster_remote_hostnames,
|
||||
self.db_nb_port))
|
||||
self.db_connection_strs(
|
||||
(self.cluster_local_hostname,), self.db_nb_port
|
||||
),
|
||||
self.db_connection_strs(
|
||||
self.cluster_remote_hostnames, self.db_nb_port
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def db_nb_cluster_connection_strs(self) -> Iterator[str]:
|
||||
@ -312,10 +323,13 @@ class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return itertools.chain(
|
||||
self.db_connection_strs((self.cluster_local_hostname,),
|
||||
self.db_nb_cluster_port),
|
||||
self.db_connection_strs(self.cluster_remote_hostnames,
|
||||
self.db_nb_cluster_port))
|
||||
self.db_connection_strs(
|
||||
(self.cluster_local_hostname,), self.db_nb_cluster_port
|
||||
),
|
||||
self.db_connection_strs(
|
||||
self.cluster_remote_hostnames, self.db_nb_cluster_port
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def db_sb_cluster_connection_strs(self) -> Iterator[str]:
|
||||
@ -328,10 +342,13 @@ class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return itertools.chain(
|
||||
self.db_connection_strs((self.cluster_local_hostname,),
|
||||
self.db_sb_cluster_port),
|
||||
self.db_connection_strs(self.cluster_remote_hostnames,
|
||||
self.db_sb_cluster_port))
|
||||
self.db_connection_strs(
|
||||
(self.cluster_local_hostname,), self.db_sb_cluster_port
|
||||
),
|
||||
self.db_connection_strs(
|
||||
self.cluster_remote_hostnames, self.db_sb_cluster_port
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def db_sb_connection_strs(self) -> Iterator[str]:
|
||||
@ -346,35 +363,41 @@ class OVNDBClusterPeerHandler(sunbeam_rhandlers.BasePeerHandler,
|
||||
:rtype: Iterator[str]
|
||||
"""
|
||||
return itertools.chain(
|
||||
self.db_connection_strs((self.cluster_local_hostname,),
|
||||
self.db_sb_admin_port),
|
||||
self.db_connection_strs(self.cluster_remote_hostnames,
|
||||
self.db_sb_admin_port))
|
||||
self.db_connection_strs(
|
||||
(self.cluster_local_hostname,), self.db_sb_admin_port
|
||||
),
|
||||
self.db_connection_strs(
|
||||
self.cluster_remote_hostnames, self.db_sb_admin_port
|
||||
),
|
||||
)
|
||||
|
||||
def _on_peers_relation_joined(
|
||||
self, event: ops.framework.EventBase) -> None:
|
||||
self, event: ops.framework.EventBase
|
||||
) -> None:
|
||||
"""Process peer joined event."""
|
||||
self.publish_cluster_local_hostname()
|
||||
|
||||
def context(self) -> dict:
|
||||
"""Context from relation data."""
|
||||
ctxt = super().context()
|
||||
ctxt.update({
|
||||
'cluster_local_hostname': self.cluster_local_hostname,
|
||||
'cluster_remote_hostnames': self.cluster_remote_hostnames,
|
||||
'db_nb_cluster_connection_strs':
|
||||
self.db_nb_cluster_connection_strs,
|
||||
'db_sb_cluster_connection_strs':
|
||||
self.db_sb_cluster_connection_strs,
|
||||
'db_sb_cluster_port': self.db_sb_cluster_port,
|
||||
'db_nb_cluster_port': self.db_nb_cluster_port,
|
||||
'db_nb_connection_strs': list(self.db_nb_connection_strs),
|
||||
'db_sb_connection_strs': list(self.db_sb_connection_strs)})
|
||||
ctxt.update(
|
||||
{
|
||||
"cluster_local_hostname": self.cluster_local_hostname,
|
||||
"cluster_remote_hostnames": self.cluster_remote_hostnames,
|
||||
"db_nb_cluster_connection_strs": self.db_nb_cluster_connection_strs,
|
||||
"db_sb_cluster_connection_strs": self.db_sb_cluster_connection_strs,
|
||||
"db_sb_cluster_port": self.db_sb_cluster_port,
|
||||
"db_nb_cluster_port": self.db_nb_cluster_port,
|
||||
"db_nb_connection_strs": list(self.db_nb_connection_strs),
|
||||
"db_sb_connection_strs": list(self.db_sb_connection_strs),
|
||||
}
|
||||
)
|
||||
return ctxt
|
||||
|
||||
|
||||
class OVSDBCMSProvidesHandler(sunbeam_rhandlers.RelationHandler,
|
||||
OVNRelationUtils):
|
||||
class OVSDBCMSProvidesHandler(
|
||||
sunbeam_rhandlers.RelationHandler, OVNRelationUtils
|
||||
):
|
||||
"""Handle provides side of ovsdb-cms."""
|
||||
|
||||
def __init__(
|
||||
@ -394,13 +417,14 @@ class OVSDBCMSProvidesHandler(sunbeam_rhandlers.RelationHandler,
|
||||
# has this relation.
|
||||
logger.debug("Setting up ovs-cms provides event handler")
|
||||
import charms.ovn_central_k8s.v0.ovsdb as ovsdb
|
||||
|
||||
ovsdb_svc = ovsdb.OVSDBCMSProvides(
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
)
|
||||
self.framework.observe(
|
||||
ovsdb_svc.on.ready,
|
||||
self._on_ovsdb_service_ready)
|
||||
ovsdb_svc.on.ready, self._on_ovsdb_service_ready
|
||||
)
|
||||
return ovsdb_svc
|
||||
|
||||
def _on_ovsdb_service_ready(self, event: ops.framework.EventBase) -> None:
|
||||
@ -409,10 +433,12 @@ class OVSDBCMSProvidesHandler(sunbeam_rhandlers.RelationHandler,
|
||||
|
||||
def _update_address_data(self) -> None:
|
||||
"""Update hostname and IP address data on all relations."""
|
||||
self.interface.set_unit_data({
|
||||
'bound-hostname': str(self.cluster_local_hostname),
|
||||
'bound-address': str(self.cluster_local_addr),
|
||||
})
|
||||
self.interface.set_unit_data(
|
||||
{
|
||||
"bound-hostname": str(self.cluster_local_hostname),
|
||||
"bound-address": str(self.cluster_local_addr),
|
||||
}
|
||||
)
|
||||
|
||||
@property
|
||||
def ready(self) -> bool:
|
||||
@ -420,8 +446,9 @@ class OVSDBCMSProvidesHandler(sunbeam_rhandlers.RelationHandler,
|
||||
return True
|
||||
|
||||
|
||||
class OVSDBCMSRequiresHandler(sunbeam_rhandlers.RelationHandler,
|
||||
OVNRelationUtils):
|
||||
class OVSDBCMSRequiresHandler(
|
||||
sunbeam_rhandlers.RelationHandler, OVNRelationUtils
|
||||
):
|
||||
"""Handle provides side of ovsdb-cms."""
|
||||
|
||||
def __init__(
|
||||
@ -440,13 +467,14 @@ class OVSDBCMSRequiresHandler(sunbeam_rhandlers.RelationHandler,
|
||||
# has this relation.
|
||||
logger.debug("Setting up ovs-cms requires event handler")
|
||||
import charms.ovn_central_k8s.v0.ovsdb as ovsdb
|
||||
|
||||
ovsdb_svc = ovsdb.OVSDBCMSRequires(
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
)
|
||||
self.framework.observe(
|
||||
ovsdb_svc.on.ready,
|
||||
self._on_ovsdb_service_ready)
|
||||
ovsdb_svc.on.ready, self._on_ovsdb_service_ready
|
||||
)
|
||||
return ovsdb_svc
|
||||
|
||||
def _on_ovsdb_service_ready(self, event: ops.framework.EventBase) -> None:
|
||||
@ -461,17 +489,21 @@ class OVSDBCMSRequiresHandler(sunbeam_rhandlers.RelationHandler,
|
||||
def context(self) -> dict:
|
||||
"""Context from relation data."""
|
||||
ctxt = super().context()
|
||||
ctxt.update({
|
||||
'local_hostname': self.cluster_local_hostname,
|
||||
'hostnames': self.interface.bound_hostnames(),
|
||||
'local_address': self.cluster_local_addr,
|
||||
'addresses': self.interface.bound_addresses(),
|
||||
'db_sb_connection_strs': ','.join(self.db_sb_connection_strs),
|
||||
'db_nb_connection_strs': ','.join(self.db_nb_connection_strs),
|
||||
'db_sb_connection_hostname_strs':
|
||||
','.join(self.db_sb_connection_hostname_strs),
|
||||
'db_nb_connection_hostname_strs':
|
||||
','.join(self.db_nb_connection_hostname_strs)
|
||||
})
|
||||
ctxt.update(
|
||||
{
|
||||
"local_hostname": self.cluster_local_hostname,
|
||||
"hostnames": self.interface.bound_hostnames(),
|
||||
"local_address": self.cluster_local_addr,
|
||||
"addresses": self.interface.bound_addresses(),
|
||||
"db_sb_connection_strs": ",".join(self.db_sb_connection_strs),
|
||||
"db_nb_connection_strs": ",".join(self.db_nb_connection_strs),
|
||||
"db_sb_connection_hostname_strs": ",".join(
|
||||
self.db_sb_connection_hostname_strs
|
||||
),
|
||||
"db_nb_connection_hostname_strs": ",".join(
|
||||
self.db_nb_connection_hostname_strs
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
return ctxt
|
||||
|
@ -16,21 +16,33 @@
|
||||
|
||||
import json
|
||||
import logging
|
||||
import cryptography.hazmat.primitives.serialization as serialization
|
||||
from typing import Callable, List, Tuple, Optional
|
||||
from urllib.parse import urlparse
|
||||
from typing import (
|
||||
Callable,
|
||||
List,
|
||||
Optional,
|
||||
Tuple,
|
||||
)
|
||||
from urllib.parse import (
|
||||
urlparse,
|
||||
)
|
||||
|
||||
import cryptography.hazmat.primitives.serialization as serialization
|
||||
import ops.charm
|
||||
import ops.framework
|
||||
from ops.model import BlockedStatus, ActiveStatus, WaitingStatus, UnknownStatus
|
||||
from ops.model import (
|
||||
ActiveStatus,
|
||||
BlockedStatus,
|
||||
UnknownStatus,
|
||||
WaitingStatus,
|
||||
)
|
||||
|
||||
import ops_sunbeam.interfaces as sunbeam_interfaces
|
||||
import ops_sunbeam.compound_status as compound_status
|
||||
import ops_sunbeam.interfaces as sunbeam_interfaces
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ERASURE_CODED = "erasure-coded"
|
||||
REPLICATED = "replacated"
|
||||
REPLICATED = "replicated"
|
||||
|
||||
|
||||
class RelationHandler(ops.charm.Object):
|
||||
@ -42,9 +54,9 @@ class RelationHandler(ops.charm.Object):
|
||||
1) Registering handlers to process events from the interface. The last
|
||||
step of these handlers is to make a callback to a specified method
|
||||
within the charm `callback_f`
|
||||
2) Expose a `ready` property so the charm can check a relations readyness
|
||||
2) Expose a `ready` property so the charm can check a relations readiness
|
||||
3) A `context` method which returns a dict which pulls together data
|
||||
recieved and sent on an interface.
|
||||
received and sent on an interface.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@ -59,7 +71,7 @@ class RelationHandler(ops.charm.Object):
|
||||
charm,
|
||||
# Ensure we can have multiple instances of a relation handler,
|
||||
# but only one per relation.
|
||||
key=type(self).__name__ + '_' + relation_name
|
||||
key=type(self).__name__ + "_" + relation_name,
|
||||
)
|
||||
self.charm = charm
|
||||
self.relation_name = relation_name
|
||||
@ -71,8 +83,7 @@ class RelationHandler(ops.charm.Object):
|
||||
self.set_status(status)
|
||||
|
||||
def set_status(self, status: compound_status.Status) -> None:
|
||||
"""
|
||||
Set the status based on current state.
|
||||
"""Set the status based on current state.
|
||||
|
||||
Will be called once, during construction,
|
||||
after everything else is initialised.
|
||||
@ -148,37 +159,34 @@ class IngressHandler(RelationHandler):
|
||||
def setup_event_handler(self) -> ops.charm.Object:
|
||||
"""Configure event handlers for an Ingress relation."""
|
||||
logger.debug("Setting up ingress event handler")
|
||||
from charms.traefik_k8s.v1.ingress import IngressPerAppRequirer
|
||||
from charms.traefik_k8s.v1.ingress import (
|
||||
IngressPerAppRequirer,
|
||||
)
|
||||
|
||||
interface = IngressPerAppRequirer(
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
port=self.default_ingress_port,
|
||||
)
|
||||
self.framework.observe(
|
||||
interface.on.ready, self._on_ingress_ready
|
||||
)
|
||||
self.framework.observe(
|
||||
interface.on.revoked, self._on_ingress_revoked
|
||||
)
|
||||
self.framework.observe(interface.on.ready, self._on_ingress_ready)
|
||||
self.framework.observe(interface.on.revoked, self._on_ingress_revoked)
|
||||
return interface
|
||||
|
||||
def _on_ingress_ready(self, event) -> None: # noqa: ANN001
|
||||
"""
|
||||
Handle ingress relation changed events.
|
||||
"""Handle ingress relation changed events.
|
||||
|
||||
`event` is an instance of
|
||||
`charms.traefik_k8s.v1.ingress.IngressPerAppReadyEvent`.
|
||||
"""
|
||||
url = self.url
|
||||
logger.debug(f'Received url: {url}')
|
||||
logger.debug(f"Received url: {url}")
|
||||
if not url:
|
||||
return
|
||||
|
||||
self.callback_f(event)
|
||||
|
||||
def _on_ingress_revoked(self, event) -> None: # noqa: ANN001
|
||||
"""
|
||||
Handle ingress relation revoked event.
|
||||
"""Handle ingress relation revoked event.
|
||||
|
||||
`event` is an instance of
|
||||
`charms.traefik_k8s.v1.ingress.IngressPerAppRevokedEvent`
|
||||
@ -207,7 +215,7 @@ class IngressHandler(RelationHandler):
|
||||
"""Context containing ingress data."""
|
||||
parse_result = urlparse(self.url)
|
||||
return {
|
||||
'ingress_path': parse_result.path,
|
||||
"ingress_path": parse_result.path,
|
||||
}
|
||||
|
||||
|
||||
@ -242,26 +250,29 @@ class DBHandler(RelationHandler):
|
||||
# with a charm that doesn't want a DBHandler
|
||||
# and doesn't install this database_requires library.
|
||||
from charms.data_platform_libs.v0.database_requires import (
|
||||
DatabaseRequires
|
||||
DatabaseRequires,
|
||||
)
|
||||
|
||||
# An alias is required to keep events for this db
|
||||
# from triggering handlers for other dbs.
|
||||
# It also must be a valid python identifier.
|
||||
alias = self.relation_name.replace("-", "_")
|
||||
db = DatabaseRequires(
|
||||
self.charm, self.relation_name, self.database_name,
|
||||
relations_aliases=[alias]
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
self.database_name,
|
||||
relations_aliases=[alias],
|
||||
)
|
||||
self.framework.observe(
|
||||
# db.on[f"{alias}_database_created"], # this doesn't work because:
|
||||
# RuntimeError: Framework.observe requires a BoundEvent as
|
||||
# second parameter, got <ops.framework.PrefixedEvents object ...
|
||||
getattr(db.on, f"{alias}_database_created"),
|
||||
self._on_database_updated
|
||||
self._on_database_updated,
|
||||
)
|
||||
self.framework.observe(
|
||||
getattr(db.on, f"{alias}_endpoints_changed"),
|
||||
self._on_database_updated
|
||||
self._on_database_updated,
|
||||
)
|
||||
# this will be set to self.interface in parent class
|
||||
return db
|
||||
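The comments above flag an ops quirk worth spelling out: the per-alias events
are defined dynamically, and ``framework.observe`` only accepts a
``BoundEvent``, so the event has to be looked up with ``getattr`` rather than
subscription. A short sketch, using a made-up ``shared-db`` relation and
``mydb`` database name:

.. code:: python

    relation_name = "shared-db"              # hypothetical relation name
    alias = relation_name.replace("-", "_")  # must be a valid identifier

    db = DatabaseRequires(
        self.charm, relation_name, "mydb", relations_aliases=[alias]
    )
    # db.on["shared_db_database_created"] returns a PrefixedEvents object,
    # which framework.observe rejects; attribute lookup yields the
    # BoundEvent it needs.
    self.framework.observe(
        getattr(db.on, f"{alias}_database_created"),
        self._on_database_updated,
    )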
@ -289,9 +300,9 @@ class DBHandler(RelationHandler):
|
||||
"""Whether the handler is ready for use."""
|
||||
data = self.get_relation_data()
|
||||
return bool(
|
||||
data.get("endpoints") and
|
||||
data.get("username") and
|
||||
data.get("password")
|
||||
data.get("endpoints")
|
||||
and data.get("username")
|
||||
and data.get("password")
|
||||
)
|
||||
|
||||
def context(self) -> dict:
|
||||
@ -356,6 +367,7 @@ class RabbitMQHandler(RelationHandler):
|
||||
# Lazy import to ensure this lib is only required if the charm
|
||||
# has this relation.
|
||||
import charms.rabbitmq_k8s.v0.rabbitmq as sunbeam_rabbitmq
|
||||
|
||||
amqp = sunbeam_rabbitmq.RabbitMQRequires(
|
||||
self.charm, self.relation_name, self.username, self.vhost
|
||||
)
|
||||
@ -433,6 +445,7 @@ class IdentityServiceRequiresHandler(RelationHandler):
|
||||
"""Configure event handlers for an Identity service relation."""
|
||||
logger.debug("Setting up Identity Service event handler")
|
||||
import charms.keystone_k8s.v0.identity_service as sun_id
|
||||
|
||||
id_svc = sun_id.IdentityServiceRequires(
|
||||
self.charm, self.relation_name, self.service_endpoints, self.region
|
||||
)
|
||||
@ -486,7 +499,8 @@ class BasePeerHandler(RelationHandler):
|
||||
return peer_int
|
||||
|
||||
def _on_peers_relation_joined(
|
||||
self, event: ops.framework.EventBase) -> None:
|
||||
self, event: ops.framework.EventBase
|
||||
) -> None:
|
||||
"""Process peer joined event."""
|
||||
self.callback_f(event)
|
||||
|
||||
@ -503,8 +517,9 @@ class BasePeerHandler(RelationHandler):
|
||||
"""Return all app data set on the peer relation."""
|
||||
try:
|
||||
_db = {
|
||||
k.replace('-', '_'): v
|
||||
for k, v in self.interface.get_all_app_data().items()}
|
||||
k.replace("-", "_"): v
|
||||
for k, v in self.interface.get_all_app_data().items()
|
||||
}
|
||||
return _db
|
||||
except AttributeError:
|
||||
return {}
|
||||
@ -563,6 +578,7 @@ class CephClientHandler(RelationHandler):
|
||||
# Lazy import to ensure this lib is only required if the charm
|
||||
# has this relation.
|
||||
import interface_ceph_client.ceph_client as ceph_client
|
||||
|
||||
ceph = ceph_client.CephClientRequires(
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
@ -570,9 +586,7 @@ class CephClientHandler(RelationHandler):
|
||||
self.framework.observe(
|
||||
ceph.on.pools_available, self._on_pools_available
|
||||
)
|
||||
self.framework.observe(
|
||||
ceph.on.broker_available, self.request_pools
|
||||
)
|
||||
self.framework.observe(ceph.on.broker_available, self.request_pools)
|
||||
return ceph
|
||||
|
||||
def _on_pools_available(self, event: ops.framework.EventBase) -> None:
|
||||
@ -582,8 +596,7 @@ class CephClientHandler(RelationHandler):
|
||||
self.callback_f(event)
|
||||
|
||||
def request_pools(self, event: ops.framework.EventBase) -> None:
|
||||
"""
|
||||
Request Ceph pool creation when interface broker is ready.
|
||||
"""Request Ceph pool creation when interface broker is ready.
|
||||
|
||||
The default handler will automatically request erasure-coded
|
||||
or replicated pools depending on the configuration of the
|
||||
@ -595,9 +608,9 @@ class CephClientHandler(RelationHandler):
|
||||
"""
|
||||
config = self.model.config.get
|
||||
data_pool_name = (
|
||||
config("rbd-pool-name") or
|
||||
config("rbd-pool") or
|
||||
self.charm.app.name
|
||||
config("rbd-pool-name")
|
||||
or config("rbd-pool")
|
||||
or self.charm.app.name
|
||||
)
|
||||
metadata_pool_name = (
|
||||
config("ec-rbd-metadata-pool") or f"{self.charm.app.name}-metadata"
|
||||
@ -628,7 +641,7 @@ class CephClientHandler(RelationHandler):
|
||||
# but is in effect driven by the number of rbd's rather than
|
||||
# their size - so it can be very lightweight.
|
||||
metadata_weight = weight * 0.01
|
||||
# Resize data pool weight to accomodate metadata weight
|
||||
# Resize data pool weight to accommodate metadata weight
|
||||
weight = weight - metadata_weight
|
||||
# Create erasure profile
|
||||
self.interface.create_erasure_profile(
|
||||
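A small numeric sketch of the weight arithmetic in the hunk above. The
starting value is an assumption for illustration; in practice it comes from
the charm's pool-weight configuration:

.. code:: python

    weight = 40.0                      # assumed % of cluster storage for data
    metadata_weight = weight * 0.01    # metadata pool kept deliberately small
    weight = weight - metadata_weight  # data pool keeps the remainder
    print(metadata_weight, weight)     # 0.4 39.6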
@ -662,7 +675,9 @@ class CephClientHandler(RelationHandler):
|
||||
)
|
||||
else:
|
||||
self.interface.create_replicated_pool(
|
||||
name=data_pool_name, replicas=replicas, weight=weight,
|
||||
name=data_pool_name,
|
||||
replicas=replicas,
|
||||
weight=weight,
|
||||
app_name=self.app_name,
|
||||
)
|
||||
|
||||
@ -674,18 +689,16 @@ class CephClientHandler(RelationHandler):
|
||||
@property
|
||||
def key(self) -> str:
|
||||
"""Retrieve the cephx key provided for the application."""
|
||||
return self.interface.get_relation_data().get('key')
|
||||
return self.interface.get_relation_data().get("key")
|
||||
|
||||
def context(self) -> dict:
|
||||
"""Context containing Ceph connection data."""
|
||||
ctxt = super().context()
|
||||
data = self.interface.get_relation_data()
|
||||
ctxt['mon_hosts'] = ",".join(
|
||||
sorted(data.get("mon_hosts"))
|
||||
)
|
||||
ctxt['auth'] = data.get('auth')
|
||||
ctxt['key'] = data.get("key")
|
||||
ctxt['rbd_features'] = None
|
||||
ctxt["mon_hosts"] = ",".join(sorted(data.get("mon_hosts")))
|
||||
ctxt["auth"] = data.get("auth")
|
||||
ctxt["key"] = data.get("key")
|
||||
ctxt["rbd_features"] = None
|
||||
return ctxt
|
||||
|
||||
|
||||
@ -704,6 +717,7 @@ class CertificatesHandler(RelationHandler):
|
||||
# Lazy import to ensure this lib is only required if the charm
|
||||
# has this relation.
|
||||
import interface_tls_certificates.ca_client as ca_client
|
||||
|
||||
self.ca_client = ca_client
|
||||
self.sans = sans
|
||||
super().__init__(charm, relation_name, callback_f, mandatory)
|
||||
@ -715,20 +729,18 @@ class CertificatesHandler(RelationHandler):
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
)
|
||||
self.framework.observe(certs.on.ca_available, self._request_certs)
|
||||
self.framework.observe(
|
||||
certs.on.ca_available,
|
||||
self._request_certs)
|
||||
self.framework.observe(
|
||||
certs.on.tls_server_config_ready,
|
||||
self._certs_ready)
|
||||
certs.on.tls_server_config_ready, self._certs_ready
|
||||
)
|
||||
return certs
|
||||
|
||||
def _request_certs(self, event: ops.framework.EventBase) -> None:
|
||||
"""Request Certificates."""
|
||||
logger.debug(f"Requesting cert for {self.sans}")
|
||||
self.interface.request_server_certificate(
|
||||
self.model.unit.name.replace('/', '-'),
|
||||
self.sans)
|
||||
self.model.unit.name.replace("/", "-"), self.sans
|
||||
)
|
||||
self.callback_f(event)
|
||||
|
||||
def _certs_ready(self, event: ops.framework.EventBase) -> None:
|
||||
@ -745,9 +757,11 @@ class CertificatesHandler(RelationHandler):
|
||||
key = self.interface.server_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PrivateFormat.TraditionalOpenSSL,
|
||||
encryption_algorithm=serialization.NoEncryption())
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
)
|
||||
cert = self.interface.server_certificate.public_bytes(
|
||||
encoding=serialization.Encoding.PEM)
|
||||
encoding=serialization.Encoding.PEM
|
||||
)
|
||||
try:
|
||||
root_ca_chain = self.interface.root_ca_chain.public_bytes(
|
||||
encoding=serialization.Encoding.PEM
|
||||
@ -762,12 +776,15 @@ class CertificatesHandler(RelationHandler):
|
||||
root_ca_chain = bytes()
|
||||
ca_cert = (
|
||||
self.interface.ca_certificate.public_bytes(
|
||||
encoding=serialization.Encoding.PEM) +
|
||||
root_ca_chain)
|
||||
encoding=serialization.Encoding.PEM
|
||||
)
|
||||
+ root_ca_chain
|
||||
)
|
||||
ctxt = {
|
||||
'key': key.decode(),
|
||||
'cert': cert.decode(),
|
||||
'ca_cert': ca_cert.decode()}
|
||||
"key": key.decode(),
|
||||
"cert": cert.decode(),
|
||||
"ca_cert": ca_cert.decode(),
|
||||
}
|
||||
return ctxt
|
||||
|
||||
|
||||
@ -798,15 +815,15 @@ class CloudCredentialsRequiresHandler(RelationHandler):
|
||||
|
||||
def setup_event_handler(self) -> ops.charm.Object:
|
||||
"""Configure event handlers for cloud-credentials relation."""
|
||||
import charms.keystone_k8s.v0.cloud_credentials as \
|
||||
cloud_credentials
|
||||
logger.debug('Setting up the cloud-credentials event handler')
|
||||
import charms.keystone_k8s.v0.cloud_credentials as cloud_credentials
|
||||
|
||||
logger.debug("Setting up the cloud-credentials event handler")
|
||||
credentials_service = cloud_credentials.CloudCredentialsRequires(
|
||||
self.charm, self.relation_name,
|
||||
self.charm,
|
||||
self.relation_name,
|
||||
)
|
||||
self.framework.observe(
|
||||
credentials_service.on.ready,
|
||||
self._credentials_ready
|
||||
credentials_service.on.ready, self._credentials_ready
|
||||
)
|
||||
return credentials_service
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
# Copyright 2021, Canonical Ltd.
|
||||
# Copyright 2021 Canonical Ltd.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -16,8 +16,13 @@
|
||||
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import List, TYPE_CHECKING
|
||||
from pathlib import (
|
||||
Path,
|
||||
)
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
List,
|
||||
)
|
||||
|
||||
import ops.pebble
|
||||
|
||||
@ -25,15 +30,17 @@ if TYPE_CHECKING:
|
||||
import ops_sunbeam.core as sunbeam_core
|
||||
import ops.model
|
||||
|
||||
from charmhelpers.contrib.openstack.templating import get_loader
|
||||
import jinja2
|
||||
from charmhelpers.contrib.openstack.templating import (
|
||||
get_loader,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_container(
|
||||
containers: List['ops.model.Container'], name: str
|
||||
) -> 'ops.model.Container':
|
||||
containers: List["ops.model.Container"], name: str
|
||||
) -> "ops.model.Container":
|
||||
"""Search for container with given name inlist of containers."""
|
||||
container = None
|
||||
for c in containers:
|
||||
@ -43,11 +50,11 @@ def get_container(
|
||||
|
||||
|
||||
def sidecar_config_render(
|
||||
container: 'ops.model.Container',
|
||||
config: 'sunbeam_core.ContainerConfigFile',
|
||||
container: "ops.model.Container",
|
||||
config: "sunbeam_core.ContainerConfigFile",
|
||||
template_dir: str,
|
||||
openstack_release: str,
|
||||
context: 'sunbeam_core.OPSCharmContexts',
|
||||
context: "sunbeam_core.OPSCharmContexts",
|
||||
) -> bool:
|
||||
"""Render templates inside containers.
|
||||
|
||||
@ -56,7 +63,7 @@ def sidecar_config_render(
|
||||
"""
|
||||
file_updated = False
|
||||
try:
|
||||
original_contents = (container.pull(config.path).read())
|
||||
original_contents = container.pull(config.path).read()
|
||||
except (ops.pebble.PathError, FileNotFoundError):
|
||||
original_contents = None
|
||||
loader = get_loader(template_dir, openstack_release)
|
||||
@ -66,9 +73,7 @@ def sidecar_config_render(
|
||||
os.path.basename(config.path) + ".j2"
|
||||
)
|
||||
except jinja2.exceptions.TemplateNotFound:
|
||||
template = _tmpl_env.get_template(
|
||||
os.path.basename(config.path)
|
||||
)
|
||||
template = _tmpl_env.get_template(os.path.basename(config.path))
|
||||
contents = template.render(context)
|
||||
if original_contents == contents:
|
||||
log.debug(
|
||||
@ -78,7 +83,8 @@ def sidecar_config_render(
|
||||
kwargs = {
|
||||
"user": config.user,
|
||||
"group": config.group,
|
||||
"permissions": config.permissions}
|
||||
"permissions": config.permissions,
|
||||
}
|
||||
parent_dir = str(Path(config.path).parent)
|
||||
if not container.isdir(parent_dir):
|
||||
container.make_dir(parent_dir, make_parents=True)
|
||||
|
@ -16,33 +16,40 @@
|
||||
|
||||
"""Module containing shared code to be used in a charms units tests."""
|
||||
|
||||
import collections
|
||||
import inspect
|
||||
import json
|
||||
import ops
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import typing
|
||||
import unittest
|
||||
import collections
|
||||
from typing import List
|
||||
from typing import (
|
||||
List,
|
||||
)
|
||||
|
||||
from mock import MagicMock, Mock, patch
|
||||
import ops
|
||||
from mock import (
|
||||
MagicMock,
|
||||
Mock,
|
||||
patch,
|
||||
)
|
||||
|
||||
sys.path.append("lib") # noqa
|
||||
sys.path.append("src") # noqa
|
||||
|
||||
from ops import framework, model
|
||||
|
||||
from ops import (
|
||||
framework,
|
||||
model,
|
||||
)
|
||||
from ops.testing import (
|
||||
SIMULATE_CAN_CONNECT,
|
||||
Harness,
|
||||
_TestingModelBackend,
|
||||
_TestingPebbleClient,
|
||||
SIMULATE_CAN_CONNECT,
|
||||
)
|
||||
|
||||
|
||||
TEST_CA = '''-----BEGIN CERTIFICATE-----
|
||||
TEST_CA = """-----BEGIN CERTIFICATE-----
|
||||
MIIDADCCAeigAwIBAgIUOTGfdiGSlKoiyWskxH1za0Nh7cYwDQYJKoZIhvcNAQEL
|
||||
BQAwGjEYMBYGA1UEAwwPRGl2aW5lQXV0aG9yaXR5MB4XDTIyMDIwNjE4MjYyM1oX
|
||||
DTMzMDEyMDE4MjYyM1owRTFDMEEGA1UEAxM6VmF1bHQgSW50ZXJtZWRpYXRlIENl
|
||||
@ -60,9 +67,9 @@ zT8PgdjdzBW80l7KAMy4/GzZvvK7MWfkkhwwnY7oXs9F3q28gFIdcYyc9A1SDg/8
|
||||
8jWI6RP5yBcNS/PgUmVV+Ko1uTHxNsKjOn7QPuUgjMBeW0fpBCHVFxz7rs+orHNF
|
||||
JSWcYpOxivTh+YO8cAxAGlKzrgZDcXQDjGfF34U/v3niDUHO+CAk6Jz3io4Oxh2X
|
||||
GksTPQ==
|
||||
-----END CERTIFICATE-----'''
|
||||
-----END CERTIFICATE-----"""
|
||||
|
||||
TEST_CHAIN = '''-----BEGIN CERTIFICATE-----
|
||||
TEST_CHAIN = """-----BEGIN CERTIFICATE-----
|
||||
MIIDADCCAeigAwIBAgIUOTGfdiGSlKoiyWskxH1za0Nh7cYwDQYJKoZIhvcNAQEL
|
||||
BQAwGjEYMBYGA1UEAwwPRGl2aW5lQXV0aG9yaXR5MB4XDTIyMDIwNjE4MjYyM1oX
|
||||
DTMzMDEyMDE4MjYyM1owRTFDMEEGA1UEAxM6VmF1bHQgSW50ZXJtZWRpYXRlIENl
|
||||
@ -80,9 +87,9 @@ zT8PgdjdzBW80l7KAMy4/GzZvvK7MWfkkhwwnY7oXs9F3q28gFIdcYyc9A1SDg/8
|
||||
8jWI6RP5yBcNS/PgUmVV+Ko1uTHxNsKjOn7QPuUgjMBeW0fpBCHVFxz7rs+orHNF
|
||||
JSWcYpOxivTh+YO8cAxAGlKzrgZDcXQDjGfF34U/v3niDUHO+CAk6Jz3io4Oxh2X
|
||||
GksTPQ==
|
||||
-----END CERTIFICATE-----'''
|
||||
-----END CERTIFICATE-----"""
|
||||
|
||||
TEST_SERVER_CERT = '''-----BEGIN CERTIFICATE-----
|
||||
TEST_SERVER_CERT = """-----BEGIN CERTIFICATE-----
|
||||
MIIEEzCCAvugAwIBAgIUIRVQ0iFgTDBP+Ju6AlcnxTHywUgwDQYJKoZIhvcNAQEL
|
||||
BQAwRTFDMEEGA1UEAxM6VmF1bHQgSW50ZXJtZWRpYXRlIENlcnRpZmljYXRlIEF1
|
||||
dGhvcml0eSAoY2hhcm0tcGtpLWxvY2FsKTAeFw0yMjAyMDcxODI1NTlaFw0yMzAy
|
||||
@ -105,9 +112,9 @@ wzSbqkarasPFVpPJnFAGqry6y5B3lZ3OrhHJOIwMSOMQfPt2dSsz+HqfrMwxqAek
|
||||
smciCVWqVwN+uq0yqeH5QuACHlkJSV4o/5SkDcFZFaFHuTRqd6hMpczZIw+o+NRn
|
||||
OO1YV69oqCCfUE01zlwTF7thZA19xacGS9f8GJO9Ij15MiysZLjxoTfoof/wDdNd
|
||||
A0Rs/pW3ja1UfTItPdjC4BgWtQh1a7O9NznrW2L6nRCASI0F1FvQ
|
||||
-----END CERTIFICATE-----'''
|
||||
-----END CERTIFICATE-----"""
|
||||
|
||||
TEST_SERVER_KEY = '''-----BEGIN RSA PRIVATE KEY-----
|
||||
TEST_SERVER_KEY = """-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEowIBAAKCAQEA4VYeKjC3o9GZAnbuVBudyd/a5sHnaGZlMJz8zevhGr5nARRR
|
||||
194bgR8VSB9k1fRbF1Y9WTygBW5aiXPy+KbmaD5DsDpJNkF/2zOQDLG9nKmLbamr
|
||||
AcHFU8l8kAVwkdhYgu3T8QbLksozYPiYavg9KfA51wVxTRuUyLpvSLJkc1q0xwuJ
|
||||
@ -133,7 +140,7 @@ C8l5gTQQnHu3h5Z7HX97GWgn1ql4X1MUr+aP6Mq9CgqzCn8s/CAZeEhOIXVgwFPq
|
||||
a03odwKBgG454yINXnHPBo9jjcEKwBTaMLH0n25HMJmWaJUnGVmPzrhxHp5xMKZz
|
||||
ULTaKTN2gp7E2BuxENtAyplrvLiXXYH3CqT528JgMdMm0al6X3MXo9WqbOg/KNpa
|
||||
4JSyyuZ42yGmYlhMCimlk3kVnDxb8PJLWOFnx6f9/i0RWUqnY0nU
|
||||
-----END RSA PRIVATE KEY-----'''
|
||||
-----END RSA PRIVATE KEY-----"""
|
||||
|
||||
|
||||
class ContainerCalls:
|
||||
@ -158,17 +165,27 @@ class ContainerCalls:
|
||||
|
||||
def started_services(self, container_name: str) -> List:
|
||||
"""Distinct unordered list of services that were started."""
|
||||
return list(set([
|
||||
svc
|
||||
for svc_list in self.start[container_name]
|
||||
for svc in svc_list]))
|
||||
return list(
|
||||
set(
|
||||
[
|
||||
svc
|
||||
for svc_list in self.start[container_name]
|
||||
for svc in svc_list
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
def stopped_services(self, container_name: str) -> List:
|
||||
"""Distinct unordered list of services that were started."""
|
||||
return list(set([
|
||||
svc
|
||||
for svc_list in self.stop[container_name]
|
||||
for svc in svc_list]))
|
||||
return list(
|
||||
set(
|
||||
[
|
||||
svc
|
||||
for svc_list in self.stop[container_name]
|
||||
for svc in svc_list
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
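The two helpers above flatten every recorded start/stop call and de-duplicate
the service names. A tiny illustration with made-up data:

.. code:: python

    # Each pebble start call records the list of services it was given.
    start = {"my-service": [["svc-a", "svc-b"], ["svc-a"]]}

    started = list(
        {
            svc
            for svc_list in start["my-service"]
            for svc in svc_list
        }
    )
    # -> ["svc-a", "svc-b"] in some order; the repeated "svc-a" collapses.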
def add_push(self, container_name: str, call: typing.Dict) -> None:
|
||||
"""Log a push call."""
|
||||
@ -188,18 +205,17 @@ class ContainerCalls:
|
||||
|
||||
def updated_files(self, container_name: str) -> typing.List:
|
||||
"""Return a list of files that have been updated in a container."""
|
||||
return [c['path'] for c in self.push.get(container_name, [])]
|
||||
return [c["path"] for c in self.push.get(container_name, [])]
|
||||
|
||||
def file_update_calls(
|
||||
self,
|
||||
container_name: str,
|
||||
file_name: str
|
||||
self, container_name: str, file_name: str
|
||||
) -> typing.List:
|
||||
"""Return the update call for File_name in container_name."""
|
||||
return [
|
||||
c
|
||||
for c in self.push.get(container_name, [])
|
||||
if c['path'] == file_name]
|
||||
if c["path"] == file_name
|
||||
]
|
||||
|
||||
|
||||
class CharmTestCase(unittest.TestCase):
|
||||
@ -207,21 +223,21 @@ class CharmTestCase(unittest.TestCase):
|
||||
|
||||
container_calls = ContainerCalls()
|
||||
|
||||
def setUp(self, obj: 'typing.ANY', patches: 'typing.List') -> None:
|
||||
def setUp(self, obj: "typing.ANY", patches: "typing.List") -> None:
|
||||
"""Run constructor."""
|
||||
super().setUp()
|
||||
self.patches = patches
|
||||
self.obj = obj
|
||||
self.patch_all()
|
||||
|
||||
def patch(self, method: 'typing.ANY') -> Mock:
|
||||
def patch(self, method: "typing.ANY") -> Mock:
|
||||
"""Patch the named method on self.obj."""
|
||||
_m = patch.object(self.obj, method)
|
||||
mock = _m.start()
|
||||
self.addCleanup(_m.stop)
|
||||
return mock
|
||||
|
||||
def patch_obj(self, obj: 'typing.ANY', method: 'typing.ANY') -> Mock:
|
||||
def patch_obj(self, obj: "typing.ANY", method: "typing.ANY") -> Mock:
|
||||
"""Patch the named method on obj."""
|
||||
_m = patch.object(obj, method)
|
||||
mock = _m.start()
|
||||
@ -233,10 +249,15 @@ class CharmTestCase(unittest.TestCase):
|
||||
for method in self.patches:
|
||||
setattr(self, method, self.patch(method))
|
||||
|
||||
def check_file(self, container: str, path: str,
|
||||
contents: typing.List = None,
|
||||
user: str = None, group: str = None,
|
||||
permissions: str = None) -> None:
|
||||
def check_file(
|
||||
self,
|
||||
container: str,
|
||||
path: str,
|
||||
contents: typing.List = None,
|
||||
user: str = None,
|
||||
group: str = None,
|
||||
permissions: str = None,
|
||||
) -> None:
|
||||
"""Check the attributes of a file."""
|
||||
client = self.harness.charm.unit.get_container(container)._pebble
|
||||
files = client.list_files(path, itself=True)
|
||||
@ -248,21 +269,18 @@ class CharmTestCase(unittest.TestCase):
|
||||
received_data = infile.read()
|
||||
self.assertEqual(contents, received_data)
|
||||
if user:
|
||||
self.assertEqual(
|
||||
test_file.user, user)
|
||||
self.assertEqual(test_file.user, user)
|
||||
if group:
|
||||
self.assertEqual(
|
||||
test_file.group, group)
|
||||
self.assertEqual(test_file.group, group)
|
||||
if permissions:
|
||||
self.assertEqual(
|
||||
test_file.permissions, permissions)
|
||||
self.assertEqual(test_file.permissions, permissions)
|
||||
|
||||
|
||||
def add_ingress_relation(harness: Harness, endpoint_type: str) -> str:
|
||||
"""Add ingress relation."""
|
||||
app_name = 'traefik-' + endpoint_type
|
||||
unit_name = app_name + '/0'
|
||||
rel_name = 'ingress-' + endpoint_type
|
||||
app_name = "traefik-" + endpoint_type
|
||||
unit_name = app_name + "/0"
|
||||
rel_name = "ingress-" + endpoint_type
|
||||
rel_id = harness.add_relation(rel_name, app_name)
|
||||
harness.add_relation_unit(rel_id, unit_name)
|
||||
return rel_id
|
||||
@ -272,24 +290,20 @@ def add_ingress_relation_data(
|
||||
harness: Harness, rel_id: str, endpoint_type: str
|
||||
) -> None:
|
||||
"""Add ingress data to ingress relation."""
|
||||
app_name = 'traefik-' + endpoint_type
|
||||
url = 'http://' + endpoint_type + "-url"
|
||||
app_name = "traefik-" + endpoint_type
|
||||
url = "http://" + endpoint_type + "-url"
|
||||
|
||||
ingress_data = {"url": url}
|
||||
harness.update_relation_data(
|
||||
rel_id,
|
||||
app_name,
|
||||
{"ingress": json.dumps(ingress_data)})
|
||||
rel_id, app_name, {"ingress": json.dumps(ingress_data)}
|
||||
)
|
||||
|
||||
|
||||
def add_complete_ingress_relation(harness: Harness) -> None:
|
||||
"""Add complete Ingress relation."""
|
||||
for endpoint_type in ['internal', 'public']:
|
||||
for endpoint_type in ["internal", "public"]:
|
||||
rel_id = add_ingress_relation(harness, endpoint_type)
|
||||
add_ingress_relation_data(
|
||||
harness,
|
||||
rel_id,
|
||||
endpoint_type)
|
||||
add_ingress_relation_data(harness, rel_id, endpoint_type)
|
||||
|
||||
|
||||
def add_base_amqp_relation(harness: Harness) -> str:
|
||||
@ -303,9 +317,7 @@ def add_base_amqp_relation(harness: Harness) -> str:
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_amqp_relation_credentials(
|
||||
harness: Harness, rel_id: str
|
||||
) -> None:
|
||||
def add_amqp_relation_credentials(harness: Harness, rel_id: str) -> None:
|
||||
"""Add amqp data to amqp relation."""
|
||||
harness.update_relation_data(
|
||||
rel_id,
|
||||
@ -390,7 +402,7 @@ def add_cloud_credentials_relation_response(
|
||||
"user-domain-id": "udomain-id",
|
||||
"project-domain-name": "pdomain_-ame",
|
||||
"project-domain-id": "pdomain-id",
|
||||
"region": "region12"
|
||||
"region": "region12",
|
||||
},
|
||||
)
|
||||
|
||||
@ -406,9 +418,7 @@ def add_base_db_relation(harness: Harness) -> str:
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_db_relation_credentials(
|
||||
harness: Harness, rel_id: str
|
||||
) -> None:
|
||||
def add_db_relation_credentials(harness: Harness, rel_id: str) -> None:
|
||||
"""Add db credentials data to db relation."""
|
||||
harness.update_relation_data(
|
||||
rel_id,
|
||||
@ -433,64 +443,59 @@ def add_api_relations(harness: Harness) -> None:
|
||||
def add_complete_db_relation(harness: Harness) -> None:
|
||||
"""Add complete DB relation."""
|
||||
rel_id = add_base_db_relation(harness)
|
||||
add_db_relation_credentials(
|
||||
harness,
|
||||
rel_id)
|
||||
add_db_relation_credentials(harness, rel_id)
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_complete_identity_relation(harness: Harness) -> None:
|
||||
"""Add complete Identity relation."""
|
||||
rel_id = add_base_identity_service_relation(harness)
|
||||
add_identity_service_relation_response(
|
||||
harness,
|
||||
rel_id)
|
||||
add_identity_service_relation_response(harness, rel_id)
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_complete_cloud_credentials_relation(harness: Harness) -> None:
|
||||
"""Add complete cloud-credentials relation."""
|
||||
rel_id = add_base_cloud_credentials_relation(harness)
|
||||
add_cloud_credentials_relation_response(
|
||||
harness,
|
||||
rel_id)
|
||||
add_cloud_credentials_relation_response(harness, rel_id)
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_complete_amqp_relation(harness: Harness) -> None:
|
||||
"""Add complete AMQP relation."""
|
||||
rel_id = add_base_amqp_relation(harness)
|
||||
add_amqp_relation_credentials(
|
||||
harness,
|
||||
rel_id)
|
||||
add_amqp_relation_credentials(harness, rel_id)
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_ceph_relation_credentials(
|
||||
harness: Harness, rel_id: str
|
||||
) -> None:
|
||||
def add_ceph_relation_credentials(harness: Harness, rel_id: str) -> None:
|
||||
"""Add amqp data to amqp relation."""
|
||||
# During tests the charm class is never destroyed and recreated as it
|
||||
# would be between hook executions. This means the request is never marked
|
||||
# as complete as it never matches the previous request and always looks
|
||||
# like it needs resending.
|
||||
harness.charm.ceph.interface.previous_requests = \
|
||||
harness.charm.ceph.interface.previous_requests = (
|
||||
harness.charm.ceph.interface.get_previous_requests_from_relations()
|
||||
)
|
||||
request = json.loads(
|
||||
harness.get_relation_data(rel_id, harness.charm.unit.name)[
|
||||
'broker_req'])
|
||||
client_unit = harness.charm.unit.name.replace('/', '-')
|
||||
"broker_req"
|
||||
]
|
||||
)
|
||||
client_unit = harness.charm.unit.name.replace("/", "-")
|
||||
harness.update_relation_data(
|
||||
rel_id,
|
||||
"ceph-mon/0",
|
||||
{
|
||||
'auth': 'cephx',
|
||||
'key': 'AQBUfpVeNl7CHxAA8/f6WTcYFxW2dJ5VyvWmJg==',
|
||||
'ingress-address': '192.0.2.2',
|
||||
'ceph-public-address': '192.0.2.2',
|
||||
f'broker-rsp-{client_unit}': json.dumps({
|
||||
'exit-code': 0,
|
||||
'request-id': request['request-id']})})
|
||||
"auth": "cephx",
|
||||
"key": "AQBUfpVeNl7CHxAA8/f6WTcYFxW2dJ5VyvWmJg==",
|
||||
"ingress-address": "192.0.2.2",
|
||||
"ceph-public-address": "192.0.2.2",
|
||||
f"broker-rsp-{client_unit}": json.dumps(
|
||||
{"exit-code": 0, "request-id": request["request-id"]}
|
||||
),
|
||||
},
|
||||
)
|
||||
harness.add_relation_unit(rel_id, "ceph-mon/1")
|
||||
|
||||
|
||||
@ -507,25 +512,23 @@ def add_base_ceph_relation(harness: Harness) -> str:
|
||||
def add_complete_ceph_relation(harness: Harness) -> None:
|
||||
"""Add complete ceph relation."""
|
||||
rel_id = add_base_ceph_relation(harness)
|
||||
add_ceph_relation_credentials(
|
||||
harness,
|
||||
rel_id)
|
||||
add_ceph_relation_credentials(harness, rel_id)
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_certificates_relation_certs(
|
||||
harness: Harness, rel_id: str
|
||||
) -> None:
|
||||
def add_certificates_relation_certs(harness: Harness, rel_id: str) -> None:
|
||||
"""Add cert data to certificates relation."""
|
||||
client_unit = harness.charm.unit.name.replace('/', '_')
|
||||
client_unit = harness.charm.unit.name.replace("/", "_")
|
||||
harness.update_relation_data(
|
||||
rel_id,
|
||||
'vault/0',
|
||||
"vault/0",
|
||||
{
|
||||
f'{client_unit}.server.cert': TEST_SERVER_CERT,
|
||||
f'{client_unit}.server.key': TEST_SERVER_KEY,
|
||||
'chain': TEST_CHAIN,
|
||||
'ca': TEST_CA})
|
||||
f"{client_unit}.server.cert": TEST_SERVER_CERT,
|
||||
f"{client_unit}.server.key": TEST_SERVER_KEY,
|
||||
"chain": TEST_CHAIN,
|
||||
"ca": TEST_CA,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def add_base_certificates_relation(harness: Harness) -> str:
|
||||
@ -541,17 +544,13 @@ def add_base_certificates_relation(harness: Harness) -> str:
|
||||
def add_complete_certificates_relation(harness: Harness) -> None:
|
||||
"""Add complete certificates relation."""
|
||||
rel_id = add_base_certificates_relation(harness)
|
||||
add_certificates_relation_certs(
|
||||
harness,
|
||||
rel_id)
|
||||
add_certificates_relation_certs(harness, rel_id)
|
||||
return rel_id
|
||||
|
||||
|
||||
def add_complete_peer_relation(harness: Harness) -> None:
|
||||
"""Add complete peer relation."""
|
||||
rel_id = harness.add_relation(
|
||||
'peers',
|
||||
harness.charm.app.name)
|
||||
rel_id = harness.add_relation("peers", harness.charm.app.name)
|
||||
new_unit = f"{harness.charm.app.name}/1"
|
||||
harness.add_relation_unit(rel_id, new_unit)
|
||||
harness.update_relation_data(
|
||||
@ -561,13 +560,14 @@ def add_complete_peer_relation(harness: Harness) -> None:
|
||||
|
||||
|
||||
test_relations = {
|
||||
'database': add_complete_db_relation,
|
||||
'amqp': add_complete_amqp_relation,
|
||||
'identity-service': add_complete_identity_relation,
|
||||
'cloud-credentials': add_complete_cloud_credentials_relation,
|
||||
'peers': add_complete_peer_relation,
|
||||
'certificates': add_complete_certificates_relation,
|
||||
'ceph': add_complete_ceph_relation}
|
||||
"database": add_complete_db_relation,
|
||||
"amqp": add_complete_amqp_relation,
|
||||
"identity-service": add_complete_identity_relation,
|
||||
"cloud-credentials": add_complete_cloud_credentials_relation,
|
||||
"peers": add_complete_peer_relation,
|
||||
"certificates": add_complete_certificates_relation,
|
||||
"ceph": add_complete_ceph_relation,
|
||||
}
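The table above maps each relation name to the helper that wires it up, and
the ``add_all_relations`` helper defined just below can set them all up at
once. A sketch of typical use from a charm's unit test (the surrounding
``CharmTestCase`` setup is assumed to have created ``self.harness`` already):

.. code:: python

    # Inside a test method of a CharmTestCase subclass (sketch).
    test_utils.add_all_relations(self.harness)  # db, amqp, identity, ...
    self.assertTrue(self.harness.charm.relation_handlers_ready())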
|
||||
|
||||
|
||||
def add_all_relations(harness: Harness) -> None:
|
||||
@ -606,7 +606,6 @@ def get_harness(
|
||||
"""Return a testing harness."""
|
||||
|
||||
class _OSTestingPebbleClient(_TestingPebbleClient):
|
||||
|
||||
def exec(
|
||||
self,
|
||||
command: typing.List[str],
|
||||
@ -619,34 +618,35 @@ def get_harness(
|
||||
group_id: int = None,
|
||||
group: str = None,
|
||||
stdin: typing.Union[
|
||||
str, bytes, typing.TextIO, typing.BinaryIO] = None,
|
||||
str, bytes, typing.TextIO, typing.BinaryIO
|
||||
] = None,
|
||||
stdout: typing.Union[typing.TextIO, typing.BinaryIO] = None,
|
||||
stderr: typing.Union[typing.TextIO, typing.BinaryIO] = None,
|
||||
encoding: str = 'utf-8',
|
||||
combine_stderr: bool = False
|
||||
encoding: str = "utf-8",
|
||||
combine_stderr: bool = False,
|
||||
) -> None:
|
||||
container_calls.add_execute(
|
||||
self.container_name,
|
||||
command)
|
||||
container_calls.add_execute(self.container_name, command)
|
||||
process_mock = MagicMock()
|
||||
process_mock.wait_output.return_value = ('', None)
|
||||
process_mock.wait_output.return_value = ("", None)
|
||||
return process_mock
|
||||
|
||||
def start_services(
|
||||
self, services: List[str], timeout: float = 30.0,
|
||||
delay: float = 0.1,) -> None:
|
||||
self,
|
||||
services: List[str],
|
||||
timeout: float = 30.0,
|
||||
delay: float = 0.1,
|
||||
) -> None:
|
||||
"""Record start service events."""
|
||||
container_calls.add_start(
|
||||
self.container_name,
|
||||
services)
|
||||
container_calls.add_start(self.container_name, services)
|
||||
|
||||
def stop_services(
|
||||
self, services: List[str], timeout: float = 30.0,
|
||||
delay: float = 0.1,) -> None:
|
||||
self,
|
||||
services: List[str],
|
||||
timeout: float = 30.0,
|
||||
delay: float = 0.1,
|
||||
) -> None:
|
||||
"""Record stop service events."""
|
||||
container_calls.add_stop(
|
||||
self.container_name,
|
||||
services)
|
||||
container_calls.add_stop(self.container_name, services)
|
||||
|
||||
class _OSTestingModelBackend(_TestingModelBackend):
|
||||
def get_pebble(self, socket_path: str) -> _OSTestingPebbleClient:
|
||||
@ -656,7 +656,7 @@ def get_harness(
|
||||
client = _OSTestingPebbleClient(self)
|
||||
# Extract container name from:
|
||||
# /charm/containers/placement-api/pebble.socket
|
||||
client.container_name = socket_path.split('/')[3]
|
||||
client.container_name = socket_path.split("/")[3]
|
||||
self._pebble_clients[socket_path] = client
|
||||
self._pebble_clients_can_connect[client] = not SIMULATE_CAN_CONNECT
|
||||
return client
|
||||
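A one-line check of the socket-path parsing above, using the path quoted in
the comment:

.. code:: python

    path = "/charm/containers/placement-api/pebble.socket"
    assert path.split("/")[3] == "placement-api"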
@ -692,11 +692,7 @@ def get_harness(
|
||||
with open(metadata_file) as f:
|
||||
charm_metadata = f.read()
|
||||
|
||||
harness = Harness(
|
||||
charm_class,
|
||||
meta=charm_metadata,
|
||||
config=charm_config
|
||||
)
|
||||
harness = Harness(charm_class, meta=charm_metadata, config=charm_config)
|
||||
harness._backend = _OSTestingModelBackend(
|
||||
harness._unit_name, harness._meta, harness._get_config(charm_config)
|
||||
)
|
||||
|
39
ops-sunbeam/pyproject.toml
Normal file
@ -0,0 +1,39 @@
|
||||
# Copyright 2022 Canonical Ltd.
|
||||
# See LICENSE file for licensing details.
|
||||
|
||||
# Testing tools configuration
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
|
||||
[tool.coverage.report]
|
||||
show_missing = true
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
minversion = "6.0"
|
||||
log_cli_level = "INFO"
|
||||
|
||||
# Formatting tools configuration
|
||||
[tool.black]
|
||||
line-length = 79
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
multi_line_output = 3
|
||||
force_grid_wrap = true
|
||||
|
||||
# Linting tools configuration
|
||||
[tool.flake8]
|
||||
max-line-length = 79
|
||||
max-doc-length = 99
|
||||
max-complexity = 10
|
||||
exclude = [".git", "__pycache__", ".tox", "build", "dist", "*.egg_info", "venv"]
|
||||
select = ["E", "W", "F", "C", "N", "R", "D", "H"]
|
||||
# Ignore W503, E501 because using black creates errors with this
|
||||
# Ignore D107 Missing docstring in __init__
|
||||
ignore = ["W503", "E501", "D107", "E402"]
|
||||
per-file-ignores = []
|
||||
docstring-convention = "google"
|
||||
# Check for properly formatted copyright header in each file
|
||||
copyright-check = "True"
|
||||
copyright-author = "Canonical Ltd."
|
||||
copyright-regexp = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+%(author)s"
|
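These isort settings are what produce the import style seen throughout this
change: ``profile = "black"`` keeps isort and black compatible,
``multi_line_output = 3`` selects the vertical hanging indent, and
``force_grid_wrap = true`` wraps even single-name imports. A before/after
sketch with arbitrary names:

.. code:: python

    # Before formatting: a flat, single-line import.
    from typing import Callable, List, Optional  # noqa: F401

    # After isort with the configuration above: one name per line inside
    # parentheses, with a trailing comma.
    from typing import (  # noqa: F401
        Callable,
        List,
        Optional,
    )

The flake8-copyright settings likewise explain the ``# Copyright 2022
Canonical Ltd.`` header at the top of this new file: every module must carry
a header matching the configured regular expression.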
@ -2,12 +2,20 @@
|
||||
|
||||
[tox]
|
||||
skipsdist = True
|
||||
envlist = pep8,py3
|
||||
envlist = lint, py3
|
||||
sitepackages = False
|
||||
skip_missing_interpreters = False
|
||||
minversion = 3.18.0
|
||||
requires = virtualenv < 20.0
|
||||
|
||||
[vars]
|
||||
src_path = {toxinidir}/ops_sunbeam
|
||||
tst_path = {toxinidir}/unit_tests/
|
||||
tst_lib_path = {toxinidir}/unit_tests/lib/
|
||||
pyproject_toml = {toxinidir}/pyproject.toml
|
||||
cookie_cutter_path = {toxinidir}/shared_code/sunbeam_charm/\{\{cookiecutter.service_name\}\}
|
||||
all_path = {[vars]src_path} {[vars]tst_path}
|
||||
|
||||
[testenv]
|
||||
basepython = python3
|
||||
install_command =
|
||||
@ -20,6 +28,15 @@ allowlist_externals =
|
||||
deps =
|
||||
-r{toxinidir}/test-requirements.txt
|
||||
|
||||
[testenv:fmt]
|
||||
description = Apply coding style standards to code
|
||||
deps =
|
||||
black
|
||||
isort
|
||||
commands =
|
||||
isort {[vars]all_path} --skip-glob {[vars]tst_lib_path} --skip {toxinidir}/.tox
|
||||
black --config {[vars]pyproject_toml} {[vars]all_path} --exclude {[vars]tst_lib_path}
|
||||
|
||||
[testenv:fetch]
|
||||
basepython = python3
|
||||
deps =
|
||||
@ -50,11 +67,29 @@ basepython = python3.10
|
||||
deps = {[testenv:py3]deps}
|
||||
|
||||
[testenv:pep8]
|
||||
basepython = python3
|
||||
description = Alias for lint
|
||||
deps = {[testenv:lint]deps}
|
||||
commands = {[testenv:lint]commands}
|
||||
|
||||
[testenv:lint]
|
||||
description = Check code against coding style standards
|
||||
deps =
|
||||
{[testenv]deps}
|
||||
-r{toxinidir}/requirements.txt
|
||||
commands = flake8 {posargs} unit_tests ops_sunbeam --exclude unit_tests/lib
|
||||
black
|
||||
# flake8==4.0.1 # Pin version until https://github.com/csachs/pyproject-flake8/pull/14 is merged
|
||||
flake8
|
||||
flake8-docstrings
|
||||
flake8-copyright
|
||||
flake8-builtins
|
||||
pyproject-flake8
|
||||
pep8-naming
|
||||
isort
|
||||
codespell
|
||||
commands =
|
||||
codespell {[vars]all_path}
|
||||
# pflake8 wrapper supports config from pyproject.toml
|
||||
pflake8 --exclude {[vars]tst_lib_path} --config {toxinidir}/pyproject.toml {[vars]all_path}
|
||||
isort --check-only --diff {[vars]all_path} --skip-glob {[vars]tst_lib_path}
|
||||
black --config {[vars]pyproject_toml} --check --diff {[vars]all_path} --exclude {[vars]tst_lib_path}
|
||||
|
||||
[testenv:cover]
|
||||
basepython = python3
|
||||
|
@ -14,4 +14,5 @@
|
||||
|
||||
"""Unit tests for aso."""
|
||||
import ops.testing
|
||||
|
||||
ops.testing.SIMULATE_CAN_CONNECT = True
|
||||
|
@ -17,14 +17,18 @@
|
||||
"""Test charms for unit tests."""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import sys
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
import tempfile
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import ops.framework
|
||||
from typing import List
|
||||
|
||||
from typing import (
|
||||
List,
|
||||
)
|
||||
|
||||
sys.path.append("unit_tests/lib") # noqa
|
||||
sys.path.append("src") # noqa
|
||||
@ -44,9 +48,7 @@ options:
|
||||
type: string
|
||||
"""
|
||||
|
||||
INITIAL_CHARM_CONFIG = {
|
||||
'debug': 'true',
|
||||
'region': 'RegionOne'}
|
||||
INITIAL_CHARM_CONFIG = {"debug": "true", "region": "RegionOne"}
|
||||
|
||||
CHARM_METADATA = """
|
||||
name: my-service
|
||||
@ -206,7 +208,10 @@ class MyAPICharm(sunbeam_charm.OSBaseOperatorAPICharm):
|
||||
wsgi_admin_script = "/bin/wsgi_admin"
|
||||
wsgi_public_script = "/bin/wsgi_public"
|
||||
mandatory_relations = {
|
||||
"database", "amqp", "identity-service", "ingress-public"
|
||||
"database",
|
||||
"amqp",
|
||||
"identity-service",
|
||||
"ingress-public",
|
||||
}
|
||||
|
||||
def __init__(self, framework: "ops.framework.Framework") -> None:
|
||||
@ -256,7 +261,7 @@ class MyAPICharm(sunbeam_charm.OSBaseOperatorAPICharm):
|
||||
@property
|
||||
def healthcheck_http_url(self) -> str:
|
||||
"""Healthcheck HTTP URL for the service."""
|
||||
return f'http://localhost:{self.default_public_ingress_port}/v3'
|
||||
return f"http://localhost:{self.default_public_ingress_port}/v3"
|
||||
|
||||
|
||||
class MultiSvcPebbleHandler(sunbeam_chandlers.ServicePebbleHandler):
|
||||
@ -300,5 +305,6 @@ class TestMultiSvcCharm(MyAPICharm):
|
||||
self.container_configs,
|
||||
self.template_dir,
|
||||
self.openstack_release,
|
||||
self.configure_charm
|
||||
)]
|
||||
self.configure_charm,
|
||||
)
|
||||
]
|
||||
|
@ -14,18 +14,27 @@
|
||||
|
||||
"""Test compound_status."""
|
||||
|
||||
import mock
|
||||
import sys
|
||||
|
||||
import mock
|
||||
|
||||
sys.path.append("lib") # noqa
|
||||
sys.path.append("src") # noqa
|
||||
|
||||
from ops.model import ActiveStatus, BlockedStatus, UnknownStatus, WaitingStatus
|
||||
from ops.model import (
|
||||
ActiveStatus,
|
||||
BlockedStatus,
|
||||
UnknownStatus,
|
||||
WaitingStatus,
|
||||
)
|
||||
|
||||
import ops_sunbeam.charm as sunbeam_charm
|
||||
import ops_sunbeam.compound_status as compound_status
|
||||
import ops_sunbeam.test_utils as test_utils
|
||||
from . import test_charms
|
||||
|
||||
from . import (
|
||||
test_charms,
|
||||
)
|
||||
|
||||
|
||||
class TestCompoundStatus(test_utils.CharmTestCase):
|
||||
|
@ -14,24 +14,27 @@
|
||||
|
||||
"""Test aso."""
|
||||
|
||||
import mock
|
||||
import sys
|
||||
|
||||
sys.path.append('lib') # noqa
|
||||
sys.path.append('src') # noqa
|
||||
import mock
|
||||
|
||||
sys.path.append("lib") # noqa
|
||||
sys.path.append("src") # noqa
|
||||
|
||||
import ops.model
|
||||
|
||||
import ops_sunbeam.charm as sunbeam_charm
|
||||
import ops_sunbeam.test_utils as test_utils
|
||||
from . import test_charms
|
||||
|
||||
from . import (
|
||||
test_charms,
|
||||
)
|
||||
|
||||
|
||||
class TestOSBaseOperatorCharm(test_utils.CharmTestCase):
|
||||
"""Test for the OSBaseOperatorCharm class."""
|
||||
|
||||
PATCHES = [
|
||||
]
|
||||
PATCHES = []
|
||||
|
||||
def setUp(self) -> None:
|
||||
"""Charm test class setup."""
|
||||
@ -42,37 +45,33 @@ class TestOSBaseOperatorCharm(test_utils.CharmTestCase):
|
||||
test_charms.CHARM_METADATA,
|
||||
self.container_calls,
|
||||
charm_config=test_charms.CHARM_CONFIG,
|
||||
initial_charm_config=test_charms.INITIAL_CHARM_CONFIG)
|
||||
initial_charm_config=test_charms.INITIAL_CHARM_CONFIG,
|
||||
)
|
||||
self.harness.begin()
|
||||
self.addCleanup(self.harness.cleanup)
|
||||
|
||||
def set_pebble_ready(self) -> None:
|
||||
"""Set pebble ready event."""
|
||||
self.harness.container_pebble_ready('my-service')
|
||||
self.harness.container_pebble_ready("my-service")
|
||||
|
||||
def test_pebble_ready_handler(self) -> None:
|
||||
"""Test is raised and observed."""
|
||||
self.assertEqual(self.harness.charm.seen_events, [])
|
||||
self.set_pebble_ready()
|
||||
self.assertEqual(self.harness.charm.seen_events, ['PebbleReadyEvent'])
|
||||
self.assertEqual(self.harness.charm.seen_events, ["PebbleReadyEvent"])
|
||||
|
||||
def test_write_config(self) -> None:
|
||||
"""Test writing config when charm is ready."""
|
||||
self.set_pebble_ready()
|
||||
self.assertEqual(
|
||||
self.container_calls.push['my-service'],
|
||||
[])
|
||||
self.assertEqual(self.container_calls.push["my-service"], [])
|
||||
|
||||
def test_container_names(self) -> None:
|
||||
"""Test container name list is correct."""
|
||||
self.assertEqual(
|
||||
self.harness.charm.container_names,
|
||||
['my-service'])
|
||||
self.assertEqual(self.harness.charm.container_names, ["my-service"])
|
||||
|
||||
def test_relation_handlers_ready(self) -> None:
|
||||
"""Test relation handlers are ready."""
|
||||
self.assertTrue(
|
||||
self.harness.charm.relation_handlers_ready())
|
||||
self.assertTrue(self.harness.charm.relation_handlers_ready())
|
||||
|
||||
|
||||
class _TestOSBaseOperatorAPICharm(test_utils.CharmTestCase):
|
||||
@ -90,14 +89,16 @@ class _TestOSBaseOperatorAPICharm(test_utils.CharmTestCase):
|
||||
test_charms.API_CHARM_METADATA,
|
||||
self.container_calls,
|
||||
charm_config=test_charms.CHARM_CONFIG,
|
||||
initial_charm_config=test_charms.INITIAL_CHARM_CONFIG)
|
||||
initial_charm_config=test_charms.INITIAL_CHARM_CONFIG,
|
||||
)
|
||||
|
||||
# clean up events that were dynamically defined,
|
||||
# otherwise we get issues because they'll be redefined,
|
||||
# which is not allowed.
|
||||
from charms.data_platform_libs.v0.database_requires import (
|
||||
DatabaseEvents
|
||||
DatabaseEvents,
|
||||
)
|
||||
|
||||
for attr in (
|
||||
"database_database_created",
|
||||
"database_endpoints_changed",
|
||||
@ -113,15 +114,16 @@ class _TestOSBaseOperatorAPICharm(test_utils.CharmTestCase):
|
||||
|
||||
def set_pebble_ready(self) -> None:
|
||||
"""Set pebble ready event."""
|
||||
self.harness.container_pebble_ready('my-service')
|
||||
self.harness.container_pebble_ready("my-service")
|
||||
|
||||
|
||||
class TestOSBaseOperatorAPICharm(_TestOSBaseOperatorAPICharm):
|
||||
"""Test Charm with services."""
|
||||
|
||||
@mock.patch(
|
||||
'charms.observability_libs.v0.kubernetes_service_patch.'
|
||||
'KubernetesServicePatch')
|
||||
"charms.observability_libs.v0.kubernetes_service_patch."
|
||||
"KubernetesServicePatch"
|
||||
)
|
||||
def setUp(self, mock_svc_patch: mock.patch) -> None:
|
||||
"""Run test class setup."""
|
||||
super().setUp(test_charms.MyAPICharm)
|
||||
@ -132,32 +134,33 @@ class TestOSBaseOperatorAPICharm(_TestOSBaseOperatorAPICharm):
|
||||
self.harness.set_leader()
|
||||
test_utils.add_complete_peer_relation(self.harness)
|
||||
self.set_pebble_ready()
|
||||
self.harness.charm.leader_set({'foo': 'bar'})
|
||||
self.harness.charm.leader_set({"foo": "bar"})
|
||||
test_utils.add_api_relations(self.harness)
|
||||
test_utils.add_complete_cloud_credentials_relation(self.harness)
|
||||
expect_entries = [
|
||||
'/bin/wsgi_admin',
|
||||
'hardpassword',
|
||||
'True',
|
||||
'rabbit://my-service:rabbit.pass@10.0.0.13:5672/openstack',
|
||||
'rabbithost1.local',
|
||||
'svcpass1',
|
||||
'bar']
|
||||
expect_string = '\n' + '\n'.join(expect_entries)
|
||||
self.harness.set_can_connect('my-service', True)
|
||||
"/bin/wsgi_admin",
|
||||
"hardpassword",
|
||||
"True",
|
||||
"rabbit://my-service:rabbit.pass@10.0.0.13:5672/openstack",
|
||||
"rabbithost1.local",
|
||||
"svcpass1",
|
||||
"bar",
|
||||
]
|
||||
expect_string = "\n" + "\n".join(expect_entries)
|
||||
self.harness.set_can_connect("my-service", True)
|
||||
self.check_file(
|
||||
'my-service',
|
||||
'/etc/my-service/my-service.conf',
|
||||
"my-service",
|
||||
"/etc/my-service/my-service.conf",
|
||||
contents=expect_string,
|
||||
user='my-service',
|
||||
group='my-service',
|
||||
user="my-service",
|
||||
group="my-service",
|
||||
)
|
||||
self.check_file(
|
||||
'my-service',
|
||||
'/etc/apache2/sites-available/wsgi-my-service.conf',
|
||||
"my-service",
|
||||
"/etc/apache2/sites-available/wsgi-my-service.conf",
|
||||
contents=expect_string,
|
||||
user='root',
|
||||
group='root',
|
||||
user="root",
|
||||
group="root",
|
||||
)
|
||||
|
||||
def test_assess_status(self) -> None:
|
||||
@ -165,20 +168,20 @@ class TestOSBaseOperatorAPICharm(_TestOSBaseOperatorAPICharm):
|
||||
test_utils.add_complete_ingress_relation(self.harness)
|
||||
self.harness.set_leader()
|
||||
test_utils.add_complete_peer_relation(self.harness)
|
||||
self.harness.charm.leader_set({'foo': 'bar'})
|
||||
self.harness.charm.leader_set({"foo": "bar"})
|
||||
test_utils.add_api_relations(self.harness)
|
||||
test_utils.add_complete_cloud_credentials_relation(self.harness)
|
||||
self.harness.set_can_connect('my-service', True)
|
||||
self.harness.set_can_connect("my-service", True)
|
||||
self.assertNotEqual(
|
||||
self.harness.charm.status.status,
|
||||
ops.model.ActiveStatus())
|
||||
self.harness.charm.status.status, ops.model.ActiveStatus()
|
||||
)
|
||||
self.set_pebble_ready()
|
||||
for ph in self.harness.charm.pebble_handlers:
|
||||
self.assertTrue(ph.service_ready)
|
||||
|
||||
self.assertEqual(
|
||||
self.harness.charm.status.status,
|
||||
ops.model.ActiveStatus())
|
||||
self.harness.charm.status.status, ops.model.ActiveStatus()
|
||||
)
|
||||
|
||||
def test_start_services(self) -> None:
|
||||
"""Test service is started."""
|
||||
@ -186,81 +189,62 @@ class TestOSBaseOperatorAPICharm(_TestOSBaseOperatorAPICharm):
|
||||
self.harness.set_leader()
|
||||
test_utils.add_complete_peer_relation(self.harness)
|
||||
self.set_pebble_ready()
|
||||
self.harness.charm.leader_set({'foo': 'bar'})
|
||||
self.harness.charm.leader_set({"foo": "bar"})
|
||||
test_utils.add_api_relations(self.harness)
|
||||
test_utils.add_complete_cloud_credentials_relation(self.harness)
|
||||
self.harness.set_can_connect('my-service', True)
|
||||
self.harness.set_can_connect("my-service", True)
|
||||
self.assertEqual(
|
||||
self.container_calls.started_services('my-service'),
|
||||
['wsgi-my-service'])
|
||||
self.container_calls.started_services("my-service"),
|
||||
["wsgi-my-service"],
|
||||
)
|
||||
|
||||
def test__on_database_changed(self) -> None:
|
||||
"""Test database is requested."""
|
||||
rel_id = self.harness.add_relation('peers', 'my-service')
|
||||
self.harness.add_relation_unit(
|
||||
rel_id,
|
||||
'my-service/1')
|
||||
rel_id = self.harness.add_relation("peers", "my-service")
|
||||
self.harness.add_relation_unit(rel_id, "my-service/1")
|
||||
self.harness.set_leader()
|
||||
self.set_pebble_ready()
|
||||
db_rel_id = test_utils.add_base_db_relation(self.harness)
|
||||
test_utils.add_db_relation_credentials(self.harness, db_rel_id)
|
||||
rel_data = self.harness.get_relation_data(
|
||||
db_rel_id,
|
||||
'my-service')
|
||||
requested_db = rel_data['database']
|
||||
self.assertEqual(requested_db, 'my_service')
|
||||
rel_data = self.harness.get_relation_data(db_rel_id, "my-service")
|
||||
requested_db = rel_data["database"]
|
||||
self.assertEqual(requested_db, "my_service")
|
||||
|
||||
def test_contexts(self) -> None:
|
||||
"""Test contexts are correctly populated."""
|
||||
rel_id = self.harness.add_relation('peers', 'my-service')
|
||||
self.harness.add_relation_unit(
|
||||
rel_id,
|
||||
'my-service/1')
|
||||
rel_id = self.harness.add_relation("peers", "my-service")
|
||||
self.harness.add_relation_unit(rel_id, "my-service/1")
|
||||
self.harness.set_leader()
|
||||
self.set_pebble_ready()
|
||||
db_rel_id = test_utils.add_base_db_relation(self.harness)
|
||||
test_utils.add_db_relation_credentials(self.harness, db_rel_id)
|
||||
contexts = self.harness.charm.contexts()
|
||||
self.assertEqual(
|
||||
contexts.wsgi_config.wsgi_admin_script,
|
||||
'/bin/wsgi_admin')
|
||||
self.assertEqual(
|
||||
contexts.database.database_password,
|
||||
'hardpassword')
|
||||
self.assertEqual(
|
||||
contexts.options.debug,
|
||||
True)
|
||||
contexts.wsgi_config.wsgi_admin_script, "/bin/wsgi_admin"
|
||||
)
|
||||
self.assertEqual(contexts.database.database_password, "hardpassword")
|
||||
self.assertEqual(contexts.options.debug, True)
|
||||
|
||||
def test_peer_leader_db(self) -> None:
|
||||
"""Test interacting with peer app db."""
|
||||
rel_id = self.harness.add_relation('peers', 'my-service')
|
||||
self.harness.add_relation_unit(
|
||||
rel_id,
|
||||
'my-service/1')
|
||||
rel_id = self.harness.add_relation("peers", "my-service")
|
||||
self.harness.add_relation_unit(rel_id, "my-service/1")
|
||||
self.harness.set_leader()
|
||||
self.harness.charm.leader_set({'ready': 'true'})
|
||||
self.harness.charm.leader_set({'foo': 'bar'})
|
||||
self.harness.charm.leader_set(ginger='biscuit')
|
||||
rel_data = self.harness.get_relation_data(rel_id, 'my-service')
|
||||
self.harness.charm.leader_set({"ready": "true"})
|
||||
self.harness.charm.leader_set({"foo": "bar"})
|
||||
self.harness.charm.leader_set(ginger="biscuit")
|
||||
rel_data = self.harness.get_relation_data(rel_id, "my-service")
|
||||
self.assertEqual(
|
||||
rel_data,
|
||||
{'ready': 'true', 'foo': 'bar', 'ginger': 'biscuit'})
|
||||
self.assertEqual(
|
||||
self.harness.charm.leader_get('ready'),
|
||||
'true')
|
||||
self.assertEqual(
|
||||
self.harness.charm.leader_get('foo'),
|
||||
'bar')
|
||||
self.assertEqual(
|
||||
self.harness.charm.leader_get('ginger'),
|
||||
'biscuit')
|
||||
rel_data, {"ready": "true", "foo": "bar", "ginger": "biscuit"}
|
||||
)
|
||||
self.assertEqual(self.harness.charm.leader_get("ready"), "true")
|
||||
self.assertEqual(self.harness.charm.leader_get("foo"), "bar")
|
||||
self.assertEqual(self.harness.charm.leader_get("ginger"), "biscuit")
|
||||
|
||||
def test_peer_leader_ready(self) -> None:
|
||||
"""Test peer leader ready methods."""
|
||||
rel_id = self.harness.add_relation('peers', 'my-service')
|
||||
self.harness.add_relation_unit(
|
||||
rel_id,
|
||||
'my-service/1')
|
||||
rel_id = self.harness.add_relation("peers", "my-service")
|
||||
self.harness.add_relation_unit(rel_id, "my-service/1")
|
||||
self.harness.set_leader()
|
||||
self.assertFalse(self.harness.charm.is_leader_ready())
|
||||
self.harness.charm.set_leader_ready()
|
||||
@ -271,87 +255,86 @@ class TestOSBaseOperatorAPICharm(_TestOSBaseOperatorAPICharm):
        # Add ingress relation
        test_utils.add_complete_ingress_relation(self.harness)
        self.assertEqual(
            self.harness.charm.internal_url,
            'http://internal-url')
        self.assertEqual(
            self.harness.charm.public_url,
            'http://public-url')
            self.harness.charm.internal_url, "http://internal-url"
        )
        self.assertEqual(self.harness.charm.public_url, "http://public-url")

    @mock.patch('ops_sunbeam.charm.Client')
    @mock.patch("ops_sunbeam.charm.Client")
    def test_endpoint_urls_no_ingress(self, mock_client: mock.patch) -> None:
        """Test public_url and internal_url with no ingress defined."""
        class mock_service:

        class MockService:
            """Mock lightkube client service object."""

            def __init__(self) -> None:
                self.status = None

        mock_client.return_value = mock.MagicMock()
        mock_client.return_value.get.return_value = mock_service()
        mock_client.return_value.get.return_value = MockService()
        self.assertEqual(
            self.harness.charm.internal_url,
            'http://10.0.0.10:789')
        self.assertEqual(
            self.harness.charm.public_url,
            'http://10.0.0.10:789')
            self.harness.charm.internal_url, "http://10.0.0.10:789"
        )
        self.assertEqual(self.harness.charm.public_url, "http://10.0.0.10:789")

    def test_relation_handlers_ready(self) -> None:
        """Test relation handlers are ready."""
        # Add all mandatory relations and test relation_handlers_ready
        db_rel_id = test_utils.add_base_db_relation(self.harness)
        test_utils.add_db_relation_credentials(self.harness, db_rel_id)
        self.assertFalse(
            self.harness.charm.relation_handlers_ready())
        self.assertFalse(self.harness.charm.relation_handlers_ready())

        amqp_rel_id = test_utils.add_base_amqp_relation(self.harness)
        test_utils.add_amqp_relation_credentials(self.harness, amqp_rel_id)
        self.assertFalse(
            self.harness.charm.relation_handlers_ready())
        self.assertFalse(self.harness.charm.relation_handlers_ready())

        identity_rel_id = test_utils.add_base_identity_service_relation(
            self.harness)
            self.harness
        )
        test_utils.add_identity_service_relation_response(
            self.harness, identity_rel_id)
        self.assertFalse(
            self.harness.charm.relation_handlers_ready())
            self.harness, identity_rel_id
        )
        self.assertFalse(self.harness.charm.relation_handlers_ready())

        ingress_rel_id = test_utils.add_ingress_relation(
            self.harness, 'public')
            self.harness, "public"
        )
        test_utils.add_ingress_relation_data(
            self.harness, ingress_rel_id, 'public')
        self.assertTrue(
            self.harness.charm.relation_handlers_ready())
            self.harness, ingress_rel_id, "public"
        )
        self.assertTrue(self.harness.charm.relation_handlers_ready())

        # Add an optional relation and test if relation_handlers_ready
        # returns True
        optional_rel_id = test_utils.add_ingress_relation(
            self.harness, 'internal')
            self.harness, "internal"
        )
        test_utils.add_ingress_relation_data(
            self.harness, optional_rel_id, 'internal')
        self.assertTrue(
            self.harness.charm.relation_handlers_ready())
            self.harness, optional_rel_id, "internal"
        )
        self.assertTrue(self.harness.charm.relation_handlers_ready())

        # Remove a mandatory relation and test if relation_handlers_ready
        # returns False
        self.harness.remove_relation(ingress_rel_id)
        self.assertFalse(
            self.harness.charm.relation_handlers_ready())
        self.assertFalse(self.harness.charm.relation_handlers_ready())

        # Add the mandatory relation back and retest relation_handlers_ready
        ingress_rel_id = test_utils.add_ingress_relation(
            self.harness, 'public')
            self.harness, "public"
        )
        test_utils.add_ingress_relation_data(
            self.harness, ingress_rel_id, 'public')
        self.assertTrue(
            self.harness.charm.relation_handlers_ready())
            self.harness, ingress_rel_id, "public"
        )
        self.assertTrue(self.harness.charm.relation_handlers_ready())


class TestOSBaseOperatorMultiSVCAPICharm(_TestOSBaseOperatorAPICharm):
    """Test Charm with multiple services."""

    @mock.patch(
        'charms.observability_libs.v0.kubernetes_service_patch.'
        'KubernetesServicePatch')
        "charms.observability_libs.v0.kubernetes_service_patch."
        "KubernetesServicePatch"
    )
    def setUp(self, mock_svc_patch: mock.patch) -> None:
        """Charm test class setip."""
        super().setUp(test_charms.TestMultiSvcCharm)
@ -362,10 +345,11 @@ class TestOSBaseOperatorMultiSVCAPICharm(_TestOSBaseOperatorAPICharm):
        self.harness.set_leader()
        test_utils.add_complete_peer_relation(self.harness)
        self.set_pebble_ready()
        self.harness.charm.leader_set({'foo': 'bar'})
        self.harness.charm.leader_set({"foo": "bar"})
        test_utils.add_api_relations(self.harness)
        test_utils.add_complete_cloud_credentials_relation(self.harness)
        self.harness.set_can_connect('my-service', True)
        self.harness.set_can_connect("my-service", True)
        self.assertEqual(
            sorted(self.container_calls.started_services('my-service')),
            sorted(['apache forwarder', 'my-service']))
            sorted(self.container_calls.started_services("my-service")),
            sorted(["apache forwarder", "my-service"]),
        )
@ -14,25 +14,27 @@

"""Test ops_sunbeam.templating."""

import mock
import sys
from io import (
    BytesIO,
    TextIOWrapper,
)

import jinja2
from io import TextIOWrapper, BytesIO
import mock

sys.path.append('lib')  # noqa
sys.path.append('src')  # noqa
sys.path.append("lib")  # noqa
sys.path.append("src")  # noqa

import ops_sunbeam.test_utils as test_utils
import ops_sunbeam.templating as sunbeam_templating
import ops_sunbeam.core as sunbeam_core
import ops_sunbeam.templating as sunbeam_templating
import ops_sunbeam.test_utils as test_utils


class TestTemplating(test_utils.CharmTestCase):
    """Tests for ops_sunbeam.templating.."""

    PATCHES = [
        'get_loader'
    ]
    PATCHES = ["get_loader"]

    def setUp(self) -> None:
        """Charm test class setup."""
@ -42,41 +44,35 @@ class TestTemplating(test_utils.CharmTestCase):
        """Check rendering templates."""
        container_mock = mock.MagicMock()
        config = sunbeam_core.ContainerConfigFile(
            "/tmp/testfile.txt",
            "myuser",
            "mygrp"
            "/tmp/testfile.txt", "myuser", "mygrp"
        )
        self.get_loader.return_value = jinja2.DictLoader(
            {"testfile.txt": "debug = {{ debug }}"}
        )
        self.get_loader.return_value = jinja2.DictLoader({
            'testfile.txt': 'debug = {{ debug }}'})
        sunbeam_templating.sidecar_config_render(
            container_mock,
            config,
            "/tmp/templates",
            "essex",
            {'debug': True})
            container_mock, config, "/tmp/templates", "essex", {"debug": True}
        )
        container_mock.push.assert_called_once_with(
            '/tmp/testfile.txt',
            'debug = True',
            user='myuser',
            group='mygrp',
            permissions=None)
            "/tmp/testfile.txt",
            "debug = True",
            user="myuser",
            group="mygrp",
            permissions=None,
        )

    def test_render_no_change(self) -> None:
        """Check rendering template with no content change."""
        container_mock = mock.MagicMock()
        container_mock.pull.return_value = TextIOWrapper(
            BytesIO(b'debug = True'))
        config = sunbeam_core.ContainerConfigFile(
            "/tmp/testfile.txt",
            "myuser",
            "mygrp"
            BytesIO(b"debug = True")
        )
        config = sunbeam_core.ContainerConfigFile(
            "/tmp/testfile.txt", "myuser", "mygrp"
        )
        self.get_loader.return_value = jinja2.DictLoader(
            {"testfile.txt": "debug = {{ debug }}"}
        )
        self.get_loader.return_value = jinja2.DictLoader({
            'testfile.txt': 'debug = {{ debug }}'})
        sunbeam_templating.sidecar_config_render(
            container_mock,
            config,
            "/tmp/templates",
            "essex",
            {'debug': True})
            container_mock, config, "/tmp/templates", "essex", {"debug": True}
        )
        self.assertFalse(container_mock.push.called)