diff --git a/requirements.txt b/requirements.txt
index d9a7819..74f53a9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,9 @@
 backports.cached-property
-ops>=1.3.0,<2.0.0
+ops
 lightkube>=0.10.1,<1.0.0
 pyyaml
 pydantic==1.*
 ops.manifest>=1.1.0,<2.0.0
-git+https://github.com/charmed-kubernetes/interface-kube-control.git@6dd289d1c795fdeda1bed17873b8d6562227c829#subdirectory=ops
-git+https://github.com/charmed-kubernetes/interface-tls-certificates.git@339efe3823b9728d16cdf5bcd1fc3b5de4e68923#subdirectory=ops
\ No newline at end of file
+ops.interface-kube-control @ git+https://github.com/charmed-kubernetes/interface-kube-control.git@main#subdirectory=ops
+ops.interface-tls-certificates @ git+https://github.com/charmed-kubernetes/interface-tls-certificates.git@main#subdirectory=ops
+ops.interface_gcp @ git+https://github.com/charmed-kubernetes/interface-gcp-integration@main#subdirectory=ops
diff --git a/src/charm.py b/src/charm.py
index e85c56a..a370240 100755
--- a/src/charm.py
+++ b/src/charm.py
@@ -8,6 +8,7 @@
 from ops.charm import CharmBase
 from ops.framework import StoredState
+from ops.interface_gcp.requires import GCPIntegrationRequires
 from ops.interface_kube_control import KubeControlRequirer
 from ops.interface_tls_certificates import CertificatesRequires
 from ops.main import main
@@ -16,7 +17,6 @@
 from config import CharmConfig
 from provider_manifests import GCPProviderManifests
-from requires_gcp_integration import GCPIntegratorRequires
 
 log = logging.getLogger(__name__)
 
@@ -32,7 +32,7 @@ def __init__(self, *args):
         super().__init__(*args)
 
         # Relation Validator and datastore
-        self.integrator = GCPIntegratorRequires(self)
+        self.integrator = GCPIntegrationRequires(self, "gcp-integration")
         self.kube_control = KubeControlRequirer(self)
         self.certificates = CertificatesRequires(self)
         # Config Validator and datastore
@@ -105,6 +105,8 @@ def _sync_resources(self, event):
 
     def _request_gcp_features(self, event):
         self.integrator.enable_instance_inspection()
+        self.integrator.enable_network_management()
+        self.integrator.enable_security_management()
         self._merge_config(event=event)
 
     def _update_status(self, _):
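Note on the charm.py change: the vendored requirer is swapped for the library's `GCPIntegrationRequires`, which takes the relation endpoint name explicitly. A minimal sketch of the new wiring, assuming the library class preserves the `is_ready`/`evaluate_relation` surface of the vendored `GCPIntegratorRequires` it replaces:

```python
# Sketch under the assumption above — not the full charm.
from ops.charm import CharmBase
from ops.interface_gcp.requires import GCPIntegrationRequires


class ExampleCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # endpoint name must match the charm's `requires:` stanza
        self.integrator = GCPIntegrationRequires(self, "gcp-integration")

    def _request_gcp_features(self, event):
        # each call publishes a flag in the unit databag; the integrator
        # charm grants it and acknowledges with a completion nonce
        self.integrator.enable_instance_inspection()
        self.integrator.enable_network_management()
        self.integrator.enable_security_management()
```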
"""Implementation of gcp specific details of the kubernetes manifests.""" +import base64 import logging import pickle from hashlib import md5 @@ -31,11 +32,14 @@ class CreateSecret(Addition): def __call__(self) -> Optional[AnyResource]: """Craft the secrets object for the deployment.""" - secret_config = {SECRET_DATA: self.manifests.config.get(SECRET_DATA)} - if any(s is None for s in secret_config.values()): - log.error("secret data item is None") + creds: Optional[str] = self.manifests.config.get(SECRET_DATA) + if not creds: + log.error("secret data item is Unavailable") return None + b64_creds = base64.b64encode(creds.encode()).decode() + secret_config = {SECRET_DATA: b64_creds} + log.info("Encoding secret data for cloud-controller.") return from_dict( dict( @@ -179,7 +183,7 @@ def config(self) -> Dict: """Returns current config available from charm config and joined relations.""" config = {} if self.integrator.is_ready: - config[SECRET_DATA] = self.integrator.credentials.decode() + config[SECRET_DATA] = self.integrator.credentials if self.kube_control.is_ready: config["image-registry"] = self.kube_control.get_registry_location() config["control-node-taints"] = self.kube_control.get_controller_taints() or [ diff --git a/src/requires_gcp_integration.py b/src/requires_gcp_integration.py deleted file mode 100644 index 256ead3..0000000 --- a/src/requires_gcp_integration.py +++ /dev/null @@ -1,161 +0,0 @@ -# Copyright 2022 Canonical Ltd. -# See LICENSE file for licensing details. -"""Implementation of tls-certificates interface. - -This only implements the requires side, currently, since the providers -is still using the Reactive Charm framework self. -""" -import base64 -import json -import logging -import os -import random -import string -from typing import Mapping, Optional -from urllib.parse import urljoin -from urllib.request import Request, urlopen - -from backports.cached_property import cached_property -from ops.charm import RelationBrokenEvent -from ops.framework import Object, StoredState -from pydantic import BaseModel, Json, SecretStr, ValidationError, validator - -log = logging.getLogger(__name__) - - -# block size to read data from GCP metadata service -# (realistically, just needs to be bigger than ~20 chars) -READ_BLOCK_SIZE = 2048 - - -class Data(BaseModel): - """Databag for information shared over the relation.""" - - completed: Json[Mapping[str, str]] - credentials: Json[SecretStr] - - @validator("credentials") - def must_be_json(cls, s: Json[SecretStr]): - """Validate cloud-sa is base64 encoded json.""" - secret_val = s.get_secret_value() - try: - json.loads(secret_val) - except json.JSONDecodeError: - raise ValueError("Couldn't find json data") - return s - - -class GCPIntegratorRequires(Object): - """Requires side of gcp-integration relation.""" - - stored = StoredState() - - # https://cloud.google.com/compute/docs/storing-retrieving-metadata - _metadata_url = "http://metadata.google.internal/computeMetadata/v1/" - _instance_url = urljoin(_metadata_url, "instance/name") - _zone_url = urljoin(_metadata_url, "instance/zone") - _metadata_headers = {"Metadata-Flavor": "Google"} - - def __init__(self, charm, endpoint="gcp-integration"): - super().__init__(charm, f"relation-{endpoint}") - self.endpoint = endpoint - events = charm.on[endpoint] - self._unit_name = self.model.unit.name.replace("/", "_") - self.framework.observe(events.relation_joined, self._joined) - self.stored.set_default( - instance=None, # stores the instance name - zone=None, # stores the zone of this 
diff --git a/src/requires_gcp_integration.py b/src/requires_gcp_integration.py
deleted file mode 100644
index 256ead3..0000000
--- a/src/requires_gcp_integration.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright 2022 Canonical Ltd.
-# See LICENSE file for licensing details.
-"""Implementation of tls-certificates interface.
-
-This only implements the requires side, currently, since the providers
-is still using the Reactive Charm framework self.
-"""
-import base64
-import json
-import logging
-import os
-import random
-import string
-from typing import Mapping, Optional
-from urllib.parse import urljoin
-from urllib.request import Request, urlopen
-
-from backports.cached_property import cached_property
-from ops.charm import RelationBrokenEvent
-from ops.framework import Object, StoredState
-from pydantic import BaseModel, Json, SecretStr, ValidationError, validator
-
-log = logging.getLogger(__name__)
-
-
-# block size to read data from GCP metadata service
-# (realistically, just needs to be bigger than ~20 chars)
-READ_BLOCK_SIZE = 2048
-
-
-class Data(BaseModel):
-    """Databag for information shared over the relation."""
-
-    completed: Json[Mapping[str, str]]
-    credentials: Json[SecretStr]
-
-    @validator("credentials")
-    def must_be_json(cls, s: Json[SecretStr]):
-        """Validate cloud-sa is base64 encoded json."""
-        secret_val = s.get_secret_value()
-        try:
-            json.loads(secret_val)
-        except json.JSONDecodeError:
-            raise ValueError("Couldn't find json data")
-        return s
-
-
-class GCPIntegratorRequires(Object):
-    """Requires side of gcp-integration relation."""
-
-    stored = StoredState()
-
-    # https://cloud.google.com/compute/docs/storing-retrieving-metadata
-    _metadata_url = "http://metadata.google.internal/computeMetadata/v1/"
-    _instance_url = urljoin(_metadata_url, "instance/name")
-    _zone_url = urljoin(_metadata_url, "instance/zone")
-    _metadata_headers = {"Metadata-Flavor": "Google"}
-
-    def __init__(self, charm, endpoint="gcp-integration"):
-        super().__init__(charm, f"relation-{endpoint}")
-        self.endpoint = endpoint
-        events = charm.on[endpoint]
-        self._unit_name = self.model.unit.name.replace("/", "_")
-        self.framework.observe(events.relation_joined, self._joined)
-        self.stored.set_default(
-            instance=None,  # stores the instance name
-            zone=None,  # stores the zone of this instance
-        )
-
-    def _joined(self, event):
-        to_publish = self.relation.data[self.model.unit]
-        to_publish["charm"] = self.model.app.name
-        to_publish["instance"] = self.instance
-        to_publish["zone"] = self.zone
-        to_publish["model-uuid"] = os.environ["JUJU_MODEL_UUID"]
-
-    @cached_property
-    def relation(self):
-        """The relation to the integrator, or None."""
-        return self.model.get_relation(self.endpoint)
-
-    @cached_property
-    def _raw_data(self):
-        if self.relation and self.relation.units:
-            return self.relation.data[list(self.relation.units)[0]]
-        return None
-
-    @cached_property
-    def _data(self) -> Optional[Data]:
-        raw = self._raw_data
-        return Data(**raw) if raw else None
-
-    def evaluate_relation(self, event) -> Optional[str]:
-        """Determine if relation is ready."""
-        no_relation = not self.relation or (
-            isinstance(event, RelationBrokenEvent) and event.relation is self.relation
-        )
-        if not self.is_ready:
-            if no_relation:
-                return f"Missing required {self.endpoint}"
-            return f"Waiting for {self.endpoint}"
-        return None
-
-    @property
-    def instance(self):
-        """This unit's instance name."""
-        if self.stored.instance is None:
-            req = Request(self._instance_url, headers=self._metadata_headers)
-            with urlopen(req) as fd:
-                instance = fd.read(READ_BLOCK_SIZE).decode("utf8").strip()
-            self.stored.instance = instance
-        return self.stored.instance
-
-    @property
-    def zone(self):
-        """The zone this unit is in."""
-        if self.stored.zone is None:
-            req = Request(self._zone_url, headers=self._metadata_headers)
-            with urlopen(req) as fd:
-                zone = fd.read(READ_BLOCK_SIZE).decode("utf8").strip()
-                zone = zone.split("/")[-1]
-            self.stored.zone = zone
-        return self.stored.zone
-
-    @property
-    def is_ready(self):
-        """Whether the request for this instance has been completed."""
-        try:
-            self._data
-        except ValidationError as ve:
-            log.error(f"{self.endpoint} relation data not yet valid. ({ve}")
-            return False
-        if self._data is None:
-            log.error(f"{self.endpoint} relation data not yet available.")
-            return False
-        last_completed = self._data.completed.get(self.instance)
-        last_requested = self.relation.data[self.model.unit].get("requested")
-        log.info(f"{self.endpoint} completion {last_completed}?={last_requested}.")
-        return last_requested and last_completed == last_requested
-
-    def _request(self, keyvals):
-        alphabet = string.ascii_letters + string.digits
-        nonce = "".join(random.choice(alphabet) for _ in range(8))
-        to_publish = self.relation.data[self.model.unit]
-        to_publish.update({k: json.dumps(v) for k, v in keyvals.items()})
-        to_publish["requested"] = nonce
-
-    @property
-    def credentials(self) -> Optional[bytes]:
-        """Return credentials from integrator charm."""
-        if not self.is_ready or not self._data:
-            return None
-        return base64.b64encode(self._data.credentials.get_secret_value().encode())
-
-    def enable_instance_inspection(self):
-        """Request the ability to manage block storage."""
-        self._request({"enable-instance-inspection": True})
-
-    def enable_block_storage_management(self):
-        """Request the ability to manage block storage."""
-        self._request({"enable-block-storage-management": True})
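For context on the deletion: the vendored class implemented a nonce handshake — the unit publishes its requested feature flags plus a random `requested` nonce, and the integrator echoes that nonce back per instance in `completed`. Assuming the library requirer keeps the same protocol, readiness reduces to roughly:

```python
# Reference sketch of the readiness check from the deleted class.
def is_ready(completed: dict, unit_databag: dict, instance: str) -> bool:
    last_completed = completed.get(instance)
    last_requested = unit_databag.get("requested")
    # ready only once the integrator has acknowledged the latest nonce
    return bool(last_requested) and last_completed == last_requested
```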
({ve}") - return False - if self._data is None: - log.error(f"{self.endpoint} relation data not yet available.") - return False - last_completed = self._data.completed.get(self.instance) - last_requested = self.relation.data[self.model.unit].get("requested") - log.info(f"{self.endpoint} completion {last_completed}?={last_requested}.") - return last_requested and last_completed == last_requested - - def _request(self, keyvals): - alphabet = string.ascii_letters + string.digits - nonce = "".join(random.choice(alphabet) for _ in range(8)) - to_publish = self.relation.data[self.model.unit] - to_publish.update({k: json.dumps(v) for k, v in keyvals.items()}) - to_publish["requested"] = nonce - - @property - def credentials(self) -> Optional[bytes]: - """Return credentials from integrator charm.""" - if not self.is_ready or not self._data: - return None - return base64.b64encode(self._data.credentials.get_secret_value().encode()) - - def enable_instance_inspection(self): - """Request the ability to manage block storage.""" - self._request({"enable-instance-inspection": True}) - - def enable_block_storage_management(self): - """Request the ability to manage block storage.""" - self._request({"enable-block-storage-management": True}) diff --git a/tests/data/charm.yaml b/tests/data/charm.yaml index d707096..95ffdb8 100644 --- a/tests/data/charm.yaml +++ b/tests/data/charm.yaml @@ -2,11 +2,13 @@ description: Overlay for attaching current charm applications: gcp-integrator: charm: gcp-integrator - channel: edge + channel: latest/stable num_units: 1 trust: true gcp-cloud-provider: charm: {{charm}} + options: + enable-loadbalancers: true relations: - ["gcp-cloud-provider:certificates", "easyrsa:client"] - ["gcp-cloud-provider:kube-control", "kubernetes-control-plane"] diff --git a/tests/data/lb-test.yaml b/tests/data/lb-test.yaml new file mode 100644 index 0000000..a586c7c --- /dev/null +++ b/tests/data/lb-test.yaml @@ -0,0 +1,42 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: hello-world + run: load-balancer-example + name: hello +spec: + replicas: 5 + selector: + matchLabels: + app: hello-world + template: + metadata: + labels: + app: hello-world + run: load-balancer-example + spec: + containers: + - image: rocks.canonical.com/cdk/google-samples/node-hello:1.0 + name: node-hello + ports: + - containerPort: 8080 +--- +apiVersion: v1 +kind: Service +metadata: + creationTimestamp: null + labels: + app: hello-world + run: load-balancer-example + name: hello +spec: + ports: + - port: 8080 + protocol: TCP + targetPort: 8080 + selector: + app: hello-world + type: LoadBalancer +status: + loadBalancer: {} \ No newline at end of file diff --git a/tests/integration/test_gcp_cloud_provider.py b/tests/integration/test_gcp_cloud_provider.py index 9e2040c..4c44595 100644 --- a/tests/integration/test_gcp_cloud_provider.py +++ b/tests/integration/test_gcp_cloud_provider.py @@ -2,10 +2,13 @@ # See LICENSE file for licensing details. 
diff --git a/tests/integration/test_gcp_cloud_provider.py b/tests/integration/test_gcp_cloud_provider.py
index 9e2040c..4c44595 100644
--- a/tests/integration/test_gcp_cloud_provider.py
+++ b/tests/integration/test_gcp_cloud_provider.py
@@ -2,10 +2,13 @@
 # See LICENSE file for licensing details.
 import logging
 import shlex
+import urllib.request
 from pathlib import Path
 
 import pytest
-from lightkube.resources.core_v1 import Node
+from lightkube.codecs import load_all_yaml
+from lightkube.resources.apps_v1 import Deployment
+from lightkube.resources.core_v1 import Node, Service
 
 log = logging.getLogger(__name__)
@@ -43,3 +46,24 @@
 async def test_provider_ids(kubernetes):
     async for node in kubernetes.list(Node):
         assert node.spec.providerID.startswith("gce://")
+
+
+async def test_loadbalancer(kubernetes):
+    log.info("Starting hello-world on port=8080.")
+    lb_yaml = Path("tests/data/lb-test.yaml")
+    lb_content = load_all_yaml(lb_yaml.open())
+    try:
+        for obj in lb_content:
+            await kubernetes.create(obj, obj.metadata.name)
+        await kubernetes.wait(Deployment, "hello", for_conditions=["Available"])
+        async for _, dep in kubernetes.watch(Service, fields={"metadata.name": "hello"}):
+            if dep.status.loadBalancer.ingress:
+                break
+        assert dep.status.loadBalancer.ingress[0].ip
+        with urllib.request.urlopen(
+            f"http://{dep.status.loadBalancer.ingress[0].ip}:8080"
+        ) as resp:
+            assert b"Hello Kubernetes!" in resp.read()
+    finally:
+        for obj in lb_content:
+            await kubernetes.delete(type(obj), obj.metadata.name)
diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py
index 954d17e..29d6084 100644
--- a/tests/unit/test_charm.py
+++ b/tests/unit/test_charm.py
@@ -58,12 +58,12 @@ def kube_control():
     yield kube_control
 
 
-@pytest.fixture()
+@pytest.fixture(autouse=True)
 def gcp_integration():
-    with mock.patch("charm.GCPIntegratorRequires") as mocked:
+    with mock.patch("charm.GCPIntegrationRequires") as mocked:
         integration = mocked.return_value
-        integration.credentials = b"{}"
-        integration.evaluate_relation.return_value = None
+        integration.credentials = "{}"
+        integration.is_ready = False
         yield integration
 
 
@@ -129,8 +129,9 @@ def test_waits_for_kube_control(mock_create_kubeconfig, harness):
     assert charm.unit.status.message == "Provider manifests waiting for definition of gcp-creds"
 
 
-@pytest.mark.usefixtures("certificates", "kube_control", "gcp_integration")
-def test_waits_for_config(harness: Harness, lk_client, caplog):
+@pytest.mark.usefixtures("certificates", "kube_control")
+def test_waits_for_config(harness: Harness, lk_client, caplog, gcp_integration):
+    gcp_integration.is_ready = True
     harness.begin_with_initial_hooks()
     with mock.patch.object(lk_client, "list") as mock_list:
         mock_list.return_value = [mock.Mock(**{"metadata.annotations": {}})]
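Note on the unit-test change: making `gcp_integration` autouse means every test now runs with the integrator mocked and `is_ready = False` by default; a test that needs the relation up accepts the fixture as an argument and flips the flag, as `test_waits_for_config` now does. Illustrative pattern (these test names are hypothetical, not from the repo):

```python
def test_blocked_without_integrator(harness, gcp_integration):
    # autouse default applies: integrator mocked, is_ready is False
    harness.begin_with_initial_hooks()
    assert gcp_integration.is_ready is False


def test_ready_with_integrator(harness, gcp_integration):
    gcp_integration.is_ready = True  # per-test override of the default
    harness.begin_with_initial_hooks()
```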