From 73e2217c62fe4e3feef4529a07abed86faf91f15 Mon Sep 17 00:00:00 2001
From: Raul Zamora
Date: Wed, 16 Aug 2023 15:00:26 +0200
Subject: [PATCH 1/5] add two cluster ha test

---
 tests/integration/helpers.py | 11 +++++
 tests/integration/test_ha.py | 86 ++++++++++++++++++++++++++++++++++++
 tox.ini                      | 10 +++++
 3 files changed, 107 insertions(+)
 create mode 100644 tests/integration/test_ha.py

diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py
index c28de71a..5de01b2a 100644
--- a/tests/integration/helpers.py
+++ b/tests/integration/helpers.py
@@ -262,6 +262,17 @@ def produce_and_check_logs(
         message = f"Message #{i}"
         client.produce_message(topic_name=topic, message_content=message)
 
+    check_logs(model_full_name, kafka_unit_name, topic)
+
+
+def check_logs(model_full_name: str, kafka_unit_name: str, topic: str) -> None:
+    """Checks if messages for a topic have been produced.
+
+    Args:
+        model_full_name: the full name of the model
+        kafka_unit_name: the kafka unit to check logs on
+        topic: the desired topic to check
+    """
     logs = check_output(
         f"JUJU_MODEL={model_full_name} juju ssh {kafka_unit_name} sudo -i 'find {KafkaSnap.DATA_PATH}/data'",
         stderr=PIPE,
diff --git a/tests/integration/test_ha.py b/tests/integration/test_ha.py
new file mode 100644
index 00000000..fbcd3291
--- /dev/null
+++ b/tests/integration/test_ha.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# Copyright 2023 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+import asyncio
+import logging
+
+import pytest
+from pytest_operator.plugin import OpsTest
+
+from .helpers import (
+    APP_NAME,
+    REL_NAME_ADMIN,
+    ZK_NAME,
+    check_logs,
+    produce_and_check_logs,
+)
+
+logger = logging.getLogger(__name__)
+
+
+DUMMY_NAME = "app"
+
+
+@pytest.mark.abort_on_fail
+async def test_build_and_deploy(ops_test: OpsTest, kafka_charm):
+    await asyncio.gather(
+        ops_test.model.deploy(kafka_charm, application_name=APP_NAME, num_units=1, series="jammy"),
+        ops_test.model.deploy(ZK_NAME, channel="edge", num_units=1, series="jammy"),
+    )
+    await ops_test.model.wait_for_idle(apps=[APP_NAME, ZK_NAME], idle_period=30, timeout=3600)
+    assert ops_test.model.applications[APP_NAME].status == "blocked"
+    assert ops_test.model.applications[ZK_NAME].status == "active"
+
+    await ops_test.model.add_relation(APP_NAME, ZK_NAME)
+    async with ops_test.fast_forward():
+        await ops_test.model.wait_for_idle(apps=[APP_NAME, ZK_NAME], idle_period=30)
+        assert ops_test.model.applications[APP_NAME].status == "active"
+        assert ops_test.model.applications[ZK_NAME].status == "active"
+
+
+async def test_second_cluster(ops_test: OpsTest, kafka_charm, app_charm):
+    second_kafka_name = f"{APP_NAME}-two"
+    second_zk_name = f"{ZK_NAME}-two"
+
+    await asyncio.gather(
+        ops_test.model.deploy(
+            kafka_charm, application_name=second_kafka_name, num_units=1, series="jammy"
+        ),
+        ops_test.model.deploy(
+            ZK_NAME, channel="edge", application_name=second_zk_name, num_units=1, series="jammy"
+        ),
+        ops_test.model.deploy(app_charm, application_name=DUMMY_NAME, num_units=1, series="jammy"),
+    )
+
+    await ops_test.model.wait_for_idle(
+        apps=[second_kafka_name, second_zk_name, DUMMY_NAME],
+        idle_period=30,
+        timeout=3600,
+    )
+    assert ops_test.model.applications[second_kafka_name].status == "blocked"
+
+    await ops_test.model.add_relation(second_kafka_name, second_zk_name)
+
+    # Relate "app" to the *first* cluster
+    await ops_test.model.add_relation(APP_NAME, f"{DUMMY_NAME}:{REL_NAME_ADMIN}")
+
+    await ops_test.model.wait_for_idle(
+        apps=[second_kafka_name, second_zk_name, DUMMY_NAME, APP_NAME],
+        idle_period=30,
+    )
+
+    produce_and_check_logs(
+        model_full_name=ops_test.model_full_name,
+        kafka_unit_name=f"{APP_NAME}/0",
+        provider_unit_name=f"{DUMMY_NAME}/0",
+        topic="hot-topic",
+    )
+
+    # Check that logs are not found on the second cluster
+    with pytest.raises(AssertionError):
+        check_logs(
+            model_full_name=ops_test.model_full_name,
+            kafka_unit_name=f"{second_kafka_name}/0",
+            topic="hot-topic",
+        )
diff --git a/tox.ini b/tox.ini
index a265bb48..15d1a28e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -127,3 +127,13 @@ pass_env =
 commands =
     poetry install --with integration
     poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_tls.py
+
+[testenv:integration-ha]
+description = Run TLS integration tests
+pass_env =
+    {[testenv]pass_env}
+    CI
+    CI_PACKED_CHARMS
+commands =
+    poetry install --with integration
+    poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_ha.py

From 13d857ebb0aa8cdf6831d8ae8c86033bd4fcec38 Mon Sep 17 00:00:00 2001
From: Raul Zamora
Date: Wed, 16 Aug 2023 15:41:52 +0200
Subject: [PATCH 2/5] add test to CI

---
 .github/workflows/ci.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index f7504a2e..c850f423 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -54,6 +54,7 @@ jobs:
           - integration-scaling
           - integration-password-rotation
           - integration-tls
+          - integration-ha
     name: ${{ matrix.tox-environments }}
     needs:
       - lint

From fd8aafae5574c85a8f262cc3a6fcc9d71ed0f14b Mon Sep 17 00:00:00 2001
From: Raul Zamora
Date: Mon, 21 Aug 2023 12:42:36 +0200
Subject: [PATCH 3/5] restructure tests pipeline

---
 .github/workflows/ci.yaml             |  44 ++++++++-
 tests/integration/ha/helpers.py       | 129 ++++++++++++++++++++++++++
 tests/integration/{ => ha}/test_ha.py |   2 +-
 tox.ini                               |  59 +++---------
 4 files changed, 186 insertions(+), 48 deletions(-)
 create mode 100644 tests/integration/ha/helpers.py
 rename tests/integration/{ => ha}/test_ha.py (99%)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index c850f423..d848ddb1 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -54,7 +54,6 @@ jobs:
           - integration-scaling
           - integration-password-rotation
           - integration-tls
-          - integration-ha
     name: ${{ matrix.tox-environments }}
     needs:
       - lint
@@ -91,3 +90,46 @@ jobs:
         run: tox run -e ${{ matrix.tox-environments }} -- -m '${{ steps.select-tests.outputs.mark_expression }}'
         env:
           CI_PACKED_CHARMS: ${{ needs.build.outputs.charms }}
+
+  integration-test-ha:
+    strategy:
+      fail-fast: false
+      matrix:
+        tox-environments:
+          - integration-ha
+    name: ${{ matrix.tox-environments }}
+    needs:
+      - lint
+      - unit-test
+      - build
+      - integration-test
+    runs-on: ubuntu-latest
+    timeout-minutes: 120
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Setup operator environment
+        # TODO: Replace with custom image on self-hosted runner
+        uses: charmed-kubernetes/actions-operator@main
+        with:
+          provider: lxd
+          bootstrap-options: "--agent-version 2.9.38"
+      - name: Download packed charm(s)
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ needs.build.outputs.artifact-name }}
+      - name: Select tests
+        id: select-tests
+        run: |
+          if [ "${{ github.event_name }}" == "schedule" ]
+          then
+            echo Running unstable and stable tests
+            echo "mark_expression=" >> $GITHUB_OUTPUT
+          else
+            echo Skipping unstable tests
+            echo "mark_expression=not unstable" >> $GITHUB_OUTPUT
+          fi
+      - name: Run integration tests
+        run: tox run -e ${{ matrix.tox-environments }} -- -m '${{ steps.select-tests.outputs.mark_expression }}'
+        env:
+          CI_PACKED_CHARMS: ${{ needs.build.outputs.charms }}
\ No newline at end of file
diff --git a/tests/integration/ha/helpers.py b/tests/integration/ha/helpers.py
new file mode 100644
index 00000000..d8123d28
--- /dev/null
+++ b/tests/integration/ha/helpers.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+# Copyright 2023 Canonical Ltd.
+# See LICENSE file for licensing details.
+import logging
+from pathlib import Path
+from subprocess import PIPE, check_output
+from typing import Any, Dict
+
+import yaml
+from charms.kafka.v0.client import KafkaClient
+from kafka.admin import NewTopic
+
+from snap import KafkaSnap
+
+METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())
+APP_NAME = METADATA["name"]
+ZK_NAME = "zookeeper"
+REL_NAME_ADMIN = "kafka-client-admin"
+
+logger = logging.getLogger(__name__)
+
+
+def produce_and_check_logs(
+    model_full_name: str, kafka_unit_name: str, provider_unit_name: str, topic: str
+) -> None:
+    """Produces messages to the chosen Kafka topic.
+
+    Args:
+        model_full_name: the full name of the model
+        kafka_unit_name: the kafka unit to check logs on
+        provider_unit_name: the app to grab credentials from
+        topic: the desired topic to produce to
+
+    Raises:
+        KeyError: if missing relation data
+        AssertionError: if logs aren't found for desired topic
+    """
+    relation_data = get_provider_data(
+        unit_name=provider_unit_name,
+        model_full_name=model_full_name,
+        endpoint="kafka-client-admin",
+    )
+    topic = topic
+    username = relation_data.get("username", None)
+    password = relation_data.get("password", None)
+    servers = relation_data.get("endpoints", "").split(",")
+    security_protocol = "SASL_PLAINTEXT"
+
+    if not (username and password and servers):
+        raise KeyError("missing relation data from app charm")
+
+    client = KafkaClient(
+        servers=servers,
+        username=username,
+        password=password,
+        security_protocol=security_protocol,
+    )
+    topic_config = NewTopic(
+        name=topic,
+        num_partitions=5,
+        replication_factor=1,
+    )
+
+    client.create_topic(topic=topic_config)
+    for i in range(15):
+        message = f"Message #{i}"
+        client.produce_message(topic_name=topic, message_content=message)
+
+    check_logs(model_full_name, kafka_unit_name, topic)
+
+
+def check_logs(model_full_name: str, kafka_unit_name: str, topic: str) -> None:
+    """Checks if messages for a topic have been produced.
+
+    Args:
+        model_full_name: the full name of the model
+        kafka_unit_name: the kafka unit to check logs on
+        topic: the desired topic to check
+    """
+    logs = check_output(
+        f"JUJU_MODEL={model_full_name} juju ssh {kafka_unit_name} sudo -i 'find {KafkaSnap.DATA_PATH}/data'",
+        stderr=PIPE,
+        shell=True,
+        universal_newlines=True,
+    ).splitlines()
+
+    logger.debug(f"{logs=}")
+
+    passed = False
+    for log in logs:
+        if topic and "index" in log:
+            passed = True
+            break
+
+    assert passed, "logs not found"
+
+
+def get_provider_data(
+    unit_name: str, model_full_name: str, endpoint: str = "kafka-client"
+) -> Dict[str, str]:
+    result = show_unit(unit_name=unit_name, model_full_name=model_full_name)
+    relations_info = result[unit_name]["relation-info"]
+    logger.info(f"Relation info: {relations_info}")
+    provider_relation_data = {}
+    for info in relations_info:
+        if info["endpoint"] == endpoint:
+            logger.info(f"Relation data: {info}")
+            provider_relation_data["username"] = info["application-data"]["username"]
+            provider_relation_data["password"] = info["application-data"]["password"]
+            provider_relation_data["endpoints"] = info["application-data"]["endpoints"]
+            provider_relation_data["zookeeper-uris"] = info["application-data"]["zookeeper-uris"]
+            provider_relation_data["tls"] = info["application-data"]["tls"]
+            if "consumer-group-prefix" in info["application-data"]:
+                provider_relation_data["consumer-group-prefix"] = info["application-data"][
+                    "consumer-group-prefix"
+                ]
+            provider_relation_data["topic"] = info["application-data"]["topic"]
+    return provider_relation_data
+
+
+def show_unit(unit_name: str, model_full_name: str) -> Any:
+    result = check_output(
+        f"JUJU_MODEL={model_full_name} juju show-unit {unit_name}",
+        stderr=PIPE,
+        shell=True,
+        universal_newlines=True,
+    )
+
+    return yaml.safe_load(result)
diff --git a/tests/integration/test_ha.py b/tests/integration/ha/test_ha.py
similarity index 99%
rename from tests/integration/test_ha.py
rename to tests/integration/ha/test_ha.py
index fbcd3291..e90eb1e7 100644
--- a/tests/integration/test_ha.py
+++ b/tests/integration/ha/test_ha.py
@@ -8,7 +8,7 @@
 import pytest
 from pytest_operator.plugin import OpsTest
 
-from .helpers import (
+from helpers import (
     APP_NAME,
     REL_NAME_ADMIN,
     ZK_NAME,
diff --git a/tox.ini b/tox.ini
index 15d1a28e..5342d8bd 100644
--- a/tox.ini
+++ b/tox.ini
@@ -21,6 +21,13 @@ set_env =
     PYTHONPATH = {tox_root}/lib:{[vars]src_path}
     PYTHONBREAKPOINT=ipdb.set_trace
     PY_COLORS=1
+    charm: TEST_FILE=test_charm.py
+    provider: TEST_FILE=test_provider.py
+    scaling: TEST_FILE=test_scaling.py
+    password-rotation: TEST_FILE=test_password_rotation.py
+    tls: TEST_FILE=test_tls.py
+    ha: TEST_FILE=test_ha.py
+
 pass_env =
     PYTHONPATH
     CHARM_BUILD_DIR
@@ -78,62 +85,22 @@ commands =
     poetry install --with integration
     poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/
 
-[testenv:integration-charm]
-description = Run base integration tests
-pass_env =
-    {[testenv]pass_env}
-    CI
-    CI_PACKED_CHARMS
-commands =
-    poetry install --with integration
-    poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_charm.py
-
-[testenv:integration-provider]
-description = Run integration tests for provider
-pass_env =
-    {[testenv]pass_env}
-    CI
-    CI_PACKED_CHARMS
-commands =
-    poetry install --with integration
-    poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_provider.py
-
-[testenv:integration-scaling]
-description = Run scaling integration tests -pass_env = - {[testenv]pass_env} - CI - CI_PACKED_CHARMS -commands = - poetry install --with integration - poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_scaling.py - -[testenv:integration-password-rotation] -description = Run password rotation integration tests -pass_env = - {[testenv]pass_env} - CI - CI_PACKED_CHARMS -commands = - poetry install --with integration - poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_password_rotation.py - -[testenv:integration-tls] -description = Run TLS integration tests +[testenv:integration-{charm,provider,scaling,password-rotation,tls}] +description = Run integration tests pass_env = {[testenv]pass_env} CI CI_PACKED_CHARMS commands = poetry install --with integration - poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_tls.py + poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/{env:TEST_FILE} -[testenv:integration-ha] -description = Run TLS integration tests +[testenv:integration-ha-{ha}] +description = Run integration tests for high availability pass_env = {[testenv]pass_env} CI CI_PACKED_CHARMS commands = poetry install --with integration - poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/test_ha.py + poetry run pytest -vv --tb native --log-cli-level=INFO -s {posargs} {[vars]tests_path}/integration/ha/{env:TEST_FILE} From 60e6e7a9206d77a36188cc1fef698adf69e91bd3 Mon Sep 17 00:00:00 2001 From: Raul Zamora Date: Mon, 21 Aug 2023 12:52:44 +0200 Subject: [PATCH 4/5] fix lint --- tests/integration/ha/test_ha.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/integration/ha/test_ha.py b/tests/integration/ha/test_ha.py index e90eb1e7..94c69417 100644 --- a/tests/integration/ha/test_ha.py +++ b/tests/integration/ha/test_ha.py @@ -6,8 +6,6 @@ import logging import pytest -from pytest_operator.plugin import OpsTest - from helpers import ( APP_NAME, REL_NAME_ADMIN, @@ -15,6 +13,7 @@ check_logs, produce_and_check_logs, ) +from pytest_operator.plugin import OpsTest logger = logging.getLogger(__name__) From 4cbdadb4044c0d210c689da6ea3f520dd9394164 Mon Sep 17 00:00:00 2001 From: Raul Zamora Date: Mon, 21 Aug 2023 14:00:04 +0200 Subject: [PATCH 5/5] tell better joke to the pipeline --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d848ddb1..0eba70c5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -96,7 +96,7 @@ jobs: fail-fast: false matrix: tox-environments: - - integration-ha + - integration-ha-ha name: ${{ matrix.tox-environments }} needs: - lint