diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2670a0757..3c0bdab19 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -81,6 +81,7 @@ jobs: - tls-integration - backup-integration - metric-integration + - sharding-integration name: ${{ matrix.tox-environments }} needs: - lint diff --git a/tests/integration/sharding_tests/helpers.py b/tests/integration/sharding_tests/helpers.py new file mode 100644 index 000000000..868d45b1e --- /dev/null +++ b/tests/integration/sharding_tests/helpers.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. +from urllib.parse import quote_plus + +from pymongo import MongoClient +from pytest_operator.plugin import OpsTest + +from ..helpers import get_password + +MONGOS_PORT = 27018 +MONGOD_PORT = 27017 + + +async def generate_mongodb_client(ops_test: OpsTest, app_name: str, mongos: bool): + """Returns a MongoDB client for mongos/mongod.""" + hosts = [unit.public_address for unit in ops_test.model.applications[app_name].units] + password = await get_password(ops_test, app_name) + port = MONGOS_PORT if mongos else MONGOD_PORT + hosts = [f"{host}:{port}" for host in hosts] + hosts = ",".join(hosts) + auth_source = "" + database = "admin" + + return MongoClient( + f"mongodb://operator:" + f"{quote_plus(password)}@" + f"{hosts}/{quote_plus(database)}?" 
+ f"{auth_source}" + ) + + +def write_data_to_mongodb(client, db_name, coll_name, content) -> None: + """Writes data to the provided collection and database.""" + db = client[db_name] + horses_collection = db[coll_name] + horses_collection.insert_one(content) + + +def verify_data_mongodb(client, db_name, coll_name, key, value) -> bool: + """Checks a key/value pair for a provided collection and database.""" + db = client[db_name] + test_collection = db[coll_name] + query = test_collection.find({}, {key: 1}) + return query[0][key] == value diff --git a/tests/integration/sharding_tests/test_sharding.py b/tests/integration/sharding_tests/test_sharding.py new file mode 100644 index 000000000..d22166e86 --- /dev/null +++ b/tests/integration/sharding_tests/test_sharding.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python3 +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. +import pytest +from pytest_operator.plugin import OpsTest + +from .helpers import generate_mongodb_client, verify_data_mongodb, write_data_to_mongodb + +SHARD_ONE_APP_NAME = "shard-one" +SHARD_TWO_APP_NAME = "shard-two" +CONFIG_SERVER_APP_NAME = "config-server-one" +SHARD_REL_NAME = "sharding" +CONFIG_SERVER_REL_NAME = "config-server" +MONGODB_KEYFILE_PATH = "/var/snap/charmed-mongodb/current/etc/mongod/keyFile" +TIMEOUT = 15 * 60 + + +@pytest.mark.abort_on_fail +async def test_build_and_deploy(ops_test: OpsTest) -> None: + """Build and deploy a sharded cluster.""" + my_charm = await ops_test.build_charm(".") + await ops_test.model.deploy( + my_charm, + num_units=2, + config={"role": "config-server"}, + application_name=CONFIG_SERVER_APP_NAME, + ) + await ops_test.model.deploy( + my_charm, num_units=2, config={"role": "shard"}, application_name=SHARD_ONE_APP_NAME + ) + await ops_test.model.deploy( + my_charm, num_units=2, config={"role": "shard"}, application_name=SHARD_TWO_APP_NAME + ) + + async with ops_test.fast_forward(): + await ops_test.model.wait_for_idle( + 
apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME], + idle_period=20, + raise_on_blocked=False, + timeout=TIMEOUT, + ) + + # TODO Future PR: assert that CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME + # are blocked waiting for relations + + + @pytest.mark.abort_on_fail + async def test_cluster_active(ops_test: OpsTest) -> None: + """Tests the integration of cluster components works without error.""" + await ops_test.model.integrate( + f"{SHARD_ONE_APP_NAME}:{SHARD_REL_NAME}", + f"{CONFIG_SERVER_APP_NAME}:{CONFIG_SERVER_REL_NAME}", + ) + await ops_test.model.integrate( + f"{SHARD_TWO_APP_NAME}:{SHARD_REL_NAME}", + f"{CONFIG_SERVER_APP_NAME}:{CONFIG_SERVER_REL_NAME}", + ) + + async with ops_test.fast_forward(): + await ops_test.model.wait_for_idle( + apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME], + idle_period=20, + status="active", + timeout=TIMEOUT, + ) + + # TODO Future PR: assert that CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME + # have the correct active statuses. + + + async def test_sharding(ops_test: OpsTest) -> None: + """Tests writing data to mongos gets propagated to shards.""" + # write data to mongos on both shards. 
+ mongos_client = await generate_mongodb_client( + ops_test, app_name=CONFIG_SERVER_APP_NAME, mongos=True + ) + + # write data to shard one + write_data_to_mongodb( + mongos_client, + db_name="animals_database_1", + coll_name="horses", + content={"horse-breed": "unicorn", "real": True}, + ) + mongos_client.admin.command("movePrimary", "animals_database_1", to=SHARD_ONE_APP_NAME) + + # write data to shard two + write_data_to_mongodb( + mongos_client, + db_name="animals_database_2", + coll_name="horses", + content={"horse-breed": "pegasus", "real": True}, + ) + mongos_client.admin.command("movePrimary", "animals_database_2", to=SHARD_TWO_APP_NAME) + + # log into shard 1 verify data + shard_one_client = await generate_mongodb_client( + ops_test, app_name=SHARD_ONE_APP_NAME, mongos=False + ) + has_correct_data = verify_data_mongodb( + shard_one_client, + db_name="animals_database_1", + coll_name="horses", + key="horse-breed", + value="unicorn", + ) + assert has_correct_data, "data not written to shard-one" + + # log into shard 2 verify data + shard_two_client = await generate_mongodb_client( + ops_test, app_name=SHARD_TWO_APP_NAME, mongos=False + ) + has_correct_data = verify_data_mongodb( + shard_two_client, + db_name="animals_database_2", + coll_name="horses", + key="horse-breed", + value="pegasus", + ) + assert has_correct_data, "data not written to shard-two" diff --git a/tests/integration/sharding_tests/test_sharding_components.py b/tests/integration/sharding_tests/test_sharding_components.py deleted file mode 100644 index e4503f5b9..000000000 --- a/tests/integration/sharding_tests/test_sharding_components.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. 
-import pytest -from pytest_operator.plugin import OpsTest - -from ..helpers import get_password - -SHARD_ONE_APP_NAME = "shard-one" -CONFIG_SERVER_APP_NAME = "config-server-one" -SHARD_REL_NAME = "sharding" -CONFIG_SERVER_REL_NAME = "config-server" -MONGODB_KEYFILE_PATH = "/var/snap/charmed-mongodb/current/etc/mongod/keyFile" - -""" -Integration tests are not a requirement for our release goal of POC Sharding. However they are -useful for automating tests that developers must do by hand. This file currently exists to help -running tests during the development of sharding. - -TODO Future tests: -- shard can only relate to one config server -- shard cannot change passwords -""" - - -@pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest) -> None: - """Build and deploy a sharded cluster.""" - async with ops_test.fast_forward(): - my_charm = await ops_test.build_charm(".") - await ops_test.model.deploy( - my_charm, - num_units=3, - config={"role": "config-server"}, - application_name=CONFIG_SERVER_APP_NAME, - ) - await ops_test.model.deploy( - my_charm, num_units=3, config={"role": "shard"}, application_name=SHARD_ONE_APP_NAME - ) - await ops_test.model.wait_for_idle( - apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME], status="active" - ) - - await ops_test.model.integrate( - f"{SHARD_ONE_APP_NAME}:{SHARD_REL_NAME}", - f"{CONFIG_SERVER_APP_NAME}:{CONFIG_SERVER_REL_NAME}", - ) - await ops_test.model.wait_for_idle( - apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME], status="active" - ) - - -async def test_shared_operator_password(ops_test: OpsTest) -> None: - """Verify sharded components have the same passwords.""" - shard_password = await get_password(ops_test, SHARD_ONE_APP_NAME) - config_password = await get_password(ops_test, CONFIG_SERVER_APP_NAME) - assert ( - shard_password == config_password - ), "sharding components do not have the same operator password." 
- - -async def test_shared_keyfile(ops_test: OpsTest) -> None: - """Verify sharded components have the same keyfile contents.""" - config_unit = ops_test.model.applications[CONFIG_SERVER_APP_NAME].units[0] - config_key_file = await get_keyfile_contents(config_unit) - - shard_unit = ops_test.model.applications[SHARD_ONE_APP_NAME].units[0] - shard_key_file = await get_keyfile_contents(shard_unit) - - assert config_key_file == shard_key_file, "shards and config server using different keyfiles" - - -async def get_keyfile_contents(ops_test: OpsTest, unit) -> str: - cat_cmd = f"exec --unit {unit.name} -- cat {MONGODB_KEYFILE_PATH}" - return_code, output, _ = await ops_test.juju(*cat_cmd.split()) - - if return_code != 0: - raise ProcessError( - f"Expected cat command {cat_cmd} to succeed instead it failed: {return_code}" - ) - - -class ProcessError(Exception): - """Raised when a process fails.""" diff --git a/tox.ini b/tox.ini index c89fdca5b..8c038773f 100644 --- a/tox.ini +++ b/tox.ini @@ -177,6 +177,21 @@ deps = commands = pytest -v --tb native --log-cli-level=INFO -s --durations=0 {posargs} {[vars]tests_path}/integration/metrics_tests/test_metrics.py +[testenv:sharding-integration] +description = Run sharding integration tests +pass_env = + {[testenv]pass_env} + CI + CI_PACKED_CHARMS +deps = + pytest + juju==3.2.0.1 + pytest-mock + pytest-operator + -r {tox_root}/requirements.txt +commands = + pytest -v --tb native --log-cli-level=INFO -s --durations=0 {posargs} {[vars]tests_path}/integration/sharding_tests/test_sharding.py + [testenv:integration] description = Run all integration tests