From c6b838c402ccc4f026202baaa6600ebcd5832988 Mon Sep 17 00:00:00 2001
From: Dennis Felsing
Date: Thu, 12 Dec 2024 14:03:21 +0000
Subject: [PATCH] Terraform test: Add AWS persistent

---
 ci/nightly/pipeline.template.yml              |   1 +
 ci/qa-canary/pipeline.template.yml            |  13 +
 .../lint-main/checks/check-mzcompose-files.sh |   1 +
 test/terraform/aws-persistent/main.tf         | 102 ++
 .../{aws => aws-persistent}/simple.tf         |   0
 test/terraform/{aws => aws-temporary}/main.tf |   0
 test/terraform/aws-temporary/simple.tf        |  14 +
 test/terraform/mzcompose.py                   | 895 ++++++++++--------
 8 files changed, 652 insertions(+), 374 deletions(-)
 create mode 100644 test/terraform/aws-persistent/main.tf
 rename test/terraform/{aws => aws-persistent}/simple.tf (100%)
 rename test/terraform/{aws => aws-temporary}/main.tf (100%)
 create mode 100644 test/terraform/aws-temporary/simple.tf

diff --git a/ci/nightly/pipeline.template.yml b/ci/nightly/pipeline.template.yml
index 7409938a1f91a..e2036033c4650 100644
--- a/ci/nightly/pipeline.template.yml
+++ b/ci/nightly/pipeline.template.yml
@@ -1136,6 +1136,7 @@ steps:
       - ./ci/plugins/scratch-aws-access: ~
       - ./ci/plugins/mzcompose:
           composition: terraform
+          run: aws-temporary
     branches: "main v*.*"

 - group: "Output consistency"
diff --git a/ci/qa-canary/pipeline.template.yml b/ci/qa-canary/pipeline.template.yml
index ca37d38ab62e0..62501776f8af6 100644
--- a/ci/qa-canary/pipeline.template.yml
+++ b/ci/qa-canary/pipeline.template.yml
@@ -54,3 +54,16 @@ steps:
             - --benchmarking-env
             - --scenario=StagingBench
            - --sqlite-store
+
+  - id: terraform-aws-persistent-test
+    label: "Terraform AWS Persistent Test"
+    timeout_in_minutes: 1440 # 24 hours
+    concurrency: 1
+    concurrency_group: 'terraform-aws-persistent-test'
+    plugins:
+      - ./ci/plugins/mzcompose:
+          composition: terraform
+          run: aws-persistent-test
+          args: ["--runtime=82800"] # 23 hours
+    agents:
+      queue: linux-aarch64-small
diff --git a/ci/test/lint-main/checks/check-mzcompose-files.sh b/ci/test/lint-main/checks/check-mzcompose-files.sh
index 0031f92c3dc3d..66e136aaa9390 100755
--- a/ci/test/lint-main/checks/check-mzcompose-files.sh
+++ b/ci/test/lint-main/checks/check-mzcompose-files.sh
@@ -45,6 +45,7 @@ check_default_workflow_references_others() {
         -not -wholename "./test/canary-environment/mzcompose.py" `# Only run manually` \
         -not -wholename "./test/ssh-connection/mzcompose.py" `# Handled differently` \
         -not -wholename "./test/scalability/mzcompose.py" `# Other workflows are for manual usage` \
+        -not -wholename "./test/terraform/mzcompose.py" `# Other workflows are for manual usage` \
     )

     for file in "${MZCOMPOSE_TEST_FILES[@]}"; do
diff --git a/test/terraform/aws-persistent/main.tf b/test/terraform/aws-persistent/main.tf
new file mode 100644
index 0000000000000..bb2c43102c7f0
--- /dev/null
+++ b/test/terraform/aws-persistent/main.tf
@@ -0,0 +1,102 @@
+# Copyright Materialize, Inc. and contributors. All rights reserved.
+#
+# Use of this software is governed by the Business Source License
+# included in the LICENSE file at the root of this repository.
+#
+# As of the Change Date specified in that file, in accordance with
+# the Business Source License, use of this software will be governed
+# by the Apache License, Version 2.0.
+
+provider "aws" {
+  region = "us-east-1"
+}
+
+module "materialize_infrastructure" {
+  source = "git::https://github.com/MaterializeInc/terraform-aws-materialize.git?ref=v0.1.3"
+
+  # Basic settings
+  environment = "dev"
+  vpc_name = "aws-persistent-vpc"
+  cluster_name = "aws-persistent-cluster"
+  mz_iam_service_account_name = "aws-persistent-user"
+  mz_iam_role_name = "aws-persistent-s3-role"
+
+  # VPC Configuration
+  vpc_cidr = "10.0.0.0/16"
+  availability_zones = ["us-east-1a", "us-east-1b"]
+  private_subnet_cidrs = ["10.0.1.0/24", "10.0.2.0/24"]
+  public_subnet_cidrs = ["10.0.101.0/24", "10.0.102.0/24"]
+  single_nat_gateway = true
+
+  # EKS Configuration
+  cluster_version = "1.31"
+  node_group_instance_types = ["c7a.2xlarge"]
+  node_group_desired_size = 2
+  node_group_min_size = 1
+  node_group_max_size = 3
+  node_group_capacity_type = "ON_DEMAND"
+
+  # Storage Configuration
+  bucket_name = "aws-persistent-storage-${random_id.suffix.hex}"
+  enable_bucket_versioning = true
+  enable_bucket_encryption = true
+  bucket_force_destroy = true
+
+  # Database Configuration
+  database_password = "zdUXjK4dRBBqBiTMK9gbkL9zPMYMSTsj"
+  db_identifier = "aws-persistent-metadata-db"
+  postgres_version = "15"
+  db_instance_class = "db.t3.micro"
+  db_allocated_storage = 20
+  database_name = "materialize"
+  database_username = "materialize"
+  db_multi_az = false
+
+  # Basic monitoring
+  enable_monitoring = true
+  metrics_retention_days = 30
+
+  # Tags
+  tags = {
+    Environment = "dev"
+    Project = "aws-persistent"
+    Terraform = "true"
+  }
+}
+
+# Generate random suffix for unique S3 bucket name
+resource "random_id" "suffix" {
+  byte_length = 4
+}
+
+# outputs.tf
+output "eks_cluster_endpoint" {
+  description = "EKS cluster endpoint"
+  value = module.materialize_infrastructure.eks_cluster_endpoint
+}
+
+output "database_endpoint" {
+  description = "RDS instance endpoint"
+  value = module.materialize_infrastructure.database_endpoint
+}
+
+output "s3_bucket_name" {
+  description = "Name of the S3 bucket"
+  value = module.materialize_infrastructure.s3_bucket_name
+}
+
+output "materialize_s3_role_arn" {
+  description = "The ARN of the IAM role for Materialize"
+  value = module.materialize_infrastructure.materialize_s3_role_arn
+}
+
+output "metadata_backend_url" {
+  description = "PostgreSQL connection URL in the format required by Materialize"
+  value = module.materialize_infrastructure.metadata_backend_url
+  sensitive = true
+}
+
+output "persist_backend_url" {
+  description = "S3 connection URL in the format required by Materialize using IRSA"
+  value = module.materialize_infrastructure.persist_backend_url
+}
diff --git a/test/terraform/aws/simple.tf b/test/terraform/aws-persistent/simple.tf
similarity index 100%
rename from test/terraform/aws/simple.tf
rename to test/terraform/aws-persistent/simple.tf
diff --git a/test/terraform/aws/main.tf b/test/terraform/aws-temporary/main.tf
similarity index 100%
rename from test/terraform/aws/main.tf
rename to test/terraform/aws-temporary/main.tf
diff --git a/test/terraform/aws-temporary/simple.tf b/test/terraform/aws-temporary/simple.tf
new file mode 100644
index 0000000000000..31cbc8a43fc9b
--- /dev/null
+++ b/test/terraform/aws-temporary/simple.tf
@@ -0,0 +1,14 @@
+terraform {
+  required_version = ">= 1.0"
+
+  required_providers {
+    aws = {
+      source = "hashicorp/aws"
+      version = ">= 5.75.0"
+    }
+    random = {
+      source = "hashicorp/random"
+      version = ">= 3.0"
+    }
+  }
+}
diff --git a/test/terraform/mzcompose.py b/test/terraform/mzcompose.py
index
1d851b92d5b2d..159ebacfb86d2 100644 --- a/test/terraform/mzcompose.py +++ b/test/terraform/mzcompose.py @@ -31,7 +31,7 @@ from materialize.mzcompose.services.testdrive import Testdrive SERVICES = [ - Testdrive(), # Overridden below + Testdrive(), # overridden below ] @@ -46,7 +46,403 @@ def run_ignore_error( pass -def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None: +def testdrive(no_reset: bool) -> Testdrive: + return Testdrive( + materialize_url="postgres://materialize@127.0.0.1:6875/materialize", + materialize_url_internal="postgres://mz_system:materialize@127.0.0.1:6877/materialize", + materialize_use_https=True, + no_consistency_checks=True, + network_mode="host", + volume_workdir="../testdrive:/workdir", + no_reset=no_reset, + # For full testdrive support we'll need: + # kafka_url=... + # schema_registry_url=... + # aws_endpoint=... + ) + + +def get_tag(tag: str) -> str: + return tag or f"v{ci_util.get_mz_version()}--pr.g{git.rev_parse('HEAD')}" + + +class AWS: + materialize_environment: dict | None + path: Path + environmentd_port_forward_process: subprocess.Popen[bytes] | None + balancerd_port_forward_process: subprocess.Popen[bytes] | None + + def __init__(self, path: Path): + self.materialize_environment = None + self.path = path + self.environmentd_port_forward_process = None + self.balancerd_port_forward_process = None + + def setup(self, prefix: str, setup: bool, tag: str) -> None: + if not setup: + spawn.runv( + [ + "aws", + "eks", + "update-kubeconfig", + "--name", + f"{prefix}-cluster", + "--region", + "us-east-1", + ] + ) + return + + print("--- Setup") + spawn.runv(["terraform", "init"], cwd=self.path) + spawn.runv(["terraform", "validate"], cwd=self.path) + spawn.runv(["terraform", "plan"], cwd=self.path) + spawn.runv(["terraform", "apply", "-auto-approve"], cwd=self.path) + + metadata_backend_url = spawn.capture( + ["terraform", "output", "-raw", "metadata_backend_url"], cwd=self.path + ).strip() + persist_backend_url = spawn.capture( + ["terraform", "output", "-raw", "persist_backend_url"], cwd=self.path + ).strip() + materialize_s3_role_arn = spawn.capture( + ["terraform", "output", "-raw", "materialize_s3_role_arn"], cwd=self.path + ).strip() + + spawn.runv( + [ + "aws", + "eks", + "update-kubeconfig", + "--name", + f"{prefix}-cluster", + "--region", + "us-east-1", + ] + ) + + spawn.runv(["kubectl", "get", "nodes"]) + # Not working yet? 
+ # spawn.runv( + # ["helm", "repo", "add", "openebs", "https://openebs.github.io/openebs"] + # ) + # spawn.runv(["helm", "repo", "update"]) + # spawn.runv( + # [ + # "helm", + # "install", + # "openebs", + # "--namespace", + # "openebs", + # "openebs/openebs", + # "--set", + # "engines.replicated.mayastor.enabled=false", + # "--create-namespace", + # ] + # ) + # spawn.runv( + # ["kubectl", "get", "pods", "-n", "openebs", "-l", "role=openebs-lvm"] + # ) + + aws_account_id = spawn.capture( + [ + "aws", + "sts", + "get-caller-identity", + "--query", + "Account", + "--output", + "text", + ] + ).strip() + public_ip_address = spawn.capture( + ["curl", "http://checkip.amazonaws.com"] + ).strip() + + materialize_values = { + "operator": { + "image": {"tag": tag}, + "cloudProvider": { + "type": "aws", + "region": "us-east-1", + "providers": { + "aws": { + "enabled": True, + "accountID": aws_account_id, + "iam": {"roles": {"environment": materialize_s3_role_arn}}, + } + }, + }, + }, + "rbac": {"enabled": False}, + "networkPolicies": { + "enabled": True, + "egress": {"enabled": True, "cidrs": ["0.0.0.0/0"]}, + "ingress": {"enabled": True, "cidrs": [f"{public_ip_address}/24"]}, + "internal": {"enabled": True}, + }, + } + + spawn.runv( + [ + "helm", + "install", + "materialize-operator", + "misc/helm-charts/operator", + "--namespace", + "materialize", + "--create-namespace", + "-f", + "-", + ], + cwd=MZ_ROOT, + stdin=yaml.dump(materialize_values).encode(), + ) + for i in range(60): + try: + spawn.runv( + ["kubectl", "get", "pods", "-n", "materialize"], + cwd=self.path, + ) + status = spawn.capture( + [ + "kubectl", + "get", + "pods", + "-n", + "materialize", + "-o", + "jsonpath={.items[0].status.phase}", + ], + cwd=self.path, + ) + if status == "Running": + break + except subprocess.CalledProcessError: + time.sleep(1) + else: + raise ValueError("Never completed") + + spawn.runv(["kubectl", "create", "namespace", "materialize-environment"]) + + materialize_backend_secret = { + "apiVersion": "v1", + "kind": "Secret", + "metadata": { + "name": "materialize-backend", + "namespace": "materialize-environment", + }, + "stringData": { + "metadata_backend_url": metadata_backend_url, + "persist_backend_url": persist_backend_url, + }, + } + + spawn.runv( + ["kubectl", "apply", "-f", "-"], + cwd=self.path, + stdin=yaml.dump(materialize_backend_secret).encode(), + ) + + self.materialize_environment = { + "apiVersion": "materialize.cloud/v1alpha1", + "kind": "Materialize", + "metadata": { + "name": "12345678-1234-1234-1234-123456789012", + "namespace": "materialize-environment", + }, + "spec": { + "environmentdImageRef": f"materialize/environmentd:{tag}", + "environmentdResourceRequirements": { + "limits": {"memory": "4Gi"}, + "requests": {"cpu": "2", "memory": "4Gi"}, + }, + "balancerdResourceRequirements": { + "limits": {"memory": "256Mi"}, + "requests": {"cpu": "100m", "memory": "256Mi"}, + }, + "backendSecretName": "materialize-backend", + }, + } + + spawn.runv( + ["kubectl", "apply", "-f", "-"], + cwd=self.path, + stdin=yaml.dump(self.materialize_environment).encode(), + ) + for i in range(60): + try: + spawn.runv( + [ + "kubectl", + "get", + "materializes", + "-n", + "materialize-environment", + ], + cwd=self.path, + ) + break + except subprocess.CalledProcessError: + time.sleep(1) + else: + raise ValueError("Never completed") + for i in range(180): + try: + spawn.runv( + ["kubectl", "get", "pods", "-n", "materialize-environment"], + cwd=self.path, + ) + status = spawn.capture( + [ + "kubectl", + "get", + 
"pods", + "-l", + "app=environmentd", + "-n", + "materialize-environment", + "-o", + "jsonpath={.items[0].status.phase}", + ], + cwd=self.path, + ) + if status == "Running": + break + except subprocess.CalledProcessError: + time.sleep(1) + else: + raise ValueError("Never completed") + + # Can take a while for balancerd to come up + for i in range(240): + try: + status = spawn.capture( + [ + "kubectl", + "get", + "pods", + "-l", + "app=balancerd", + "-n", + "materialize-environment", + "-o", + "jsonpath={.items[0].status.phase}", + ], + cwd=self.path, + ) + if status == "Running": + break + except subprocess.CalledProcessError: + time.sleep(1) + else: + raise ValueError("Never completed") + + def connect(self, c: Composition) -> None: + environmentd_name = spawn.capture( + [ + "kubectl", + "get", + "pods", + "-l", + "app=environmentd", + "-n", + "materialize-environment", + "-o", + "jsonpath={.items[*].metadata.name}", + ], + cwd=self.path, + ) + + balancerd_name = spawn.capture( + [ + "kubectl", + "get", + "pods", + "-l", + "app=balancerd", + "-n", + "materialize-environment", + "-o", + "jsonpath={.items[*].metadata.name}", + ], + cwd=self.path, + ) + # error: arguments in resource/name form must have a single resource and name + print(f"Got balancerd name: {balancerd_name}") + + self.environmentd_port_forward_process = subprocess.Popen( + [ + "kubectl", + "port-forward", + f"pod/{environmentd_name}", + "-n", + "materialize-environment", + "6877:6877", + "6878:6878", + ], + preexec_fn=os.setpgrp, + ) + self.balancerd_port_forward_process = subprocess.Popen( + [ + "kubectl", + "port-forward", + f"pod/{balancerd_name}", + "-n", + "materialize-environment", + "6875:6875", + "6876:6876", + ], + preexec_fn=os.setpgrp, + ) + time.sleep(10) + + c.up("testdrive", persistent=True) + c.testdrive( + dedent( + """ + > SELECT 1 + 1 + """ + ) + ) + + def cleanup(self) -> None: + if self.environmentd_port_forward_process: + os.killpg( + os.getpgid(self.environmentd_port_forward_process.pid), signal.SIGTERM + ) + if self.balancerd_port_forward_process: + os.killpg( + os.getpgid(self.balancerd_port_forward_process.pid), signal.SIGTERM + ) + + def destroy(self) -> None: + print("--- Destroying") + if self.materialize_environment: + run_ignore_error( + ["kubectl", "delete", "-f", "-"], + cwd=self.path, + stdin=yaml.dump(self.materialize_environment).encode(), + ) + run_ignore_error( + [ + "kubectl", + "delete", + "materialize.materialize.cloud/12345678-1234-1234-1234-123456789012", + "-n" "materialize-environment", + ] + ) + run_ignore_error(["kubectl", "delete", "namespace", "materialize-environment"]) + run_ignore_error( + ["helm", "uninstall", "materialize-operator"], + cwd=self.path, + ) + run_ignore_error(["kubectl", "delete", "namespace", "materialize"]) + spawn.runv(["terraform", "destroy", "-auto-approve"], cwd=self.path) + + +def workflow_aws_temporary(c: Composition, parser: WorkflowArgumentParser) -> None: """To run locally use `aws sso login` first.""" parser.add_argument( "--setup", @@ -135,355 +531,14 @@ def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None: args = parser.parse_args() - tag = args.tag or f"v{ci_util.get_mz_version()}--pr.g{git.rev_parse('HEAD')}" - materialize_environment = None - environmentd_port_forward_process = None - balancerd_port_forward_process = None - - path = MZ_ROOT / "test" / "terraform" / "aws" + tag = get_tag(args.tag) + path = MZ_ROOT / "test" / "terraform" / "aws-temporary" + aws = AWS(path) try: - if args.setup: - print("--- Setup") - 
spawn.runv(["terraform", "init"], cwd=path) - spawn.runv(["terraform", "validate"], cwd=path) - spawn.runv(["terraform", "plan"], cwd=path) - spawn.runv(["terraform", "apply", "-auto-approve"], cwd=path) - - metadata_backend_url = spawn.capture( - ["terraform", "output", "-raw", "metadata_backend_url"], cwd=path - ).strip() - persist_backend_url = spawn.capture( - ["terraform", "output", "-raw", "persist_backend_url"], cwd=path - ).strip() - materialize_s3_role_arn = spawn.capture( - ["terraform", "output", "-raw", "materialize_s3_role_arn"], cwd=path - ).strip() - - spawn.runv( - [ - "aws", - "eks", - "update-kubeconfig", - "--name", - "terraform-aws-test-cluster", - "--region", - "us-east-1", - ] - ) - - spawn.runv(["kubectl", "get", "nodes"]) - # Not working yet? - # spawn.runv( - # ["helm", "repo", "add", "openebs", "https://openebs.github.io/openebs"] - # ) - # spawn.runv(["helm", "repo", "update"]) - # spawn.runv( - # [ - # "helm", - # "install", - # "openebs", - # "--namespace", - # "openebs", - # "openebs/openebs", - # "--set", - # "engines.replicated.mayastor.enabled=false", - # "--create-namespace", - # ] - # ) - # spawn.runv( - # ["kubectl", "get", "pods", "-n", "openebs", "-l", "role=openebs-lvm"] - # ) - - aws_account_id = spawn.capture( - [ - "aws", - "sts", - "get-caller-identity", - "--query", - "Account", - "--output", - "text", - ] - ).strip() - public_ip_address = spawn.capture( - ["curl", "http://checkip.amazonaws.com"] - ).strip() - - materialize_values = { - "operator": { - "image": {"tag": tag}, - "cloudProvider": { - "type": "aws", - "region": "us-east-1", - "providers": { - "aws": { - "enabled": True, - "accountID": aws_account_id, - "iam": { - "roles": {"environment": materialize_s3_role_arn} - }, - } - }, - }, - }, - "rbac": {"enabled": False}, - "networkPolicies": { - "enabled": True, - "egress": {"enabled": True, "cidrs": ["0.0.0.0/0"]}, - "ingress": {"enabled": True, "cidrs": [f"{public_ip_address}/24"]}, - "internal": {"enabled": True}, - }, - } - - spawn.runv( - [ - "helm", - "install", - "materialize-operator", - "misc/helm-charts/operator", - "--namespace", - "materialize", - "--create-namespace", - "-f", - "-", - ], - cwd=MZ_ROOT, - stdin=yaml.dump(materialize_values).encode(), - ) - for i in range(60): - try: - spawn.runv( - ["kubectl", "get", "pods", "-n", "materialize"], - cwd=path, - ) - status = spawn.capture( - [ - "kubectl", - "get", - "pods", - "-n", - "materialize", - "-o", - "jsonpath={.items[0].status.phase}", - ], - cwd=path, - ) - if status == "Running": - break - except subprocess.CalledProcessError: - time.sleep(1) - else: - raise ValueError("Never completed") - - spawn.runv(["kubectl", "create", "namespace", "materialize-environment"]) - - materialize_backend_secret = { - "apiVersion": "v1", - "kind": "Secret", - "metadata": { - "name": "materialize-backend", - "namespace": "materialize-environment", - }, - "stringData": { - "metadata_backend_url": metadata_backend_url, - "persist_backend_url": persist_backend_url, - }, - } - - spawn.runv( - ["kubectl", "apply", "-f", "-"], - cwd=path, - stdin=yaml.dump(materialize_backend_secret).encode(), - ) - - materialize_environment = { - "apiVersion": "materialize.cloud/v1alpha1", - "kind": "Materialize", - "metadata": { - "name": "12345678-1234-1234-1234-123456789012", - "namespace": "materialize-environment", - }, - "spec": { - "environmentdImageRef": f"materialize/environmentd:{tag}", - "environmentdResourceRequirements": { - "limits": {"memory": "4Gi"}, - "requests": {"cpu": "2", "memory": 
"4Gi"}, - }, - "balancerdResourceRequirements": { - "limits": {"memory": "256Mi"}, - "requests": {"cpu": "100m", "memory": "256Mi"}, - }, - "backendSecretName": "materialize-backend", - }, - } - - spawn.runv( - ["kubectl", "apply", "-f", "-"], - cwd=path, - stdin=yaml.dump(materialize_environment).encode(), - ) - for i in range(60): - try: - spawn.runv( - [ - "kubectl", - "get", - "materializes", - "-n", - "materialize-environment", - ], - cwd=path, - ) - break - except subprocess.CalledProcessError: - time.sleep(1) - else: - raise ValueError("Never completed") - for i in range(180): - try: - spawn.runv( - ["kubectl", "get", "pods", "-n", "materialize-environment"], - cwd=path, - ) - status = spawn.capture( - [ - "kubectl", - "get", - "pods", - "-l", - "app=environmentd", - "-n", - "materialize-environment", - "-o", - "jsonpath={.items[0].status.phase}", - ], - cwd=path, - ) - if status == "Running": - break - except subprocess.CalledProcessError: - time.sleep(1) - else: - raise ValueError("Never completed") - - # Can take a while for balancerd to come up - for i in range(240): - try: - status = spawn.capture( - [ - "kubectl", - "get", - "pods", - "-l", - "app=balancerd", - "-n", - "materialize-environment", - "-o", - "jsonpath={.items[0].status.phase}", - ], - cwd=path, - ) - if status == "Running": - break - except subprocess.CalledProcessError: - time.sleep(1) - else: - raise ValueError("Never completed") - else: - spawn.runv( - [ - "aws", - "eks", - "update-kubeconfig", - "--name", - "terraform-aws-test-cluster", - "--region", - "us-east-1", - ] - ) - + aws.setup("terraform-aws-test", args.setup, tag) print("--- Running tests") - environmentd_name = spawn.capture( - [ - "kubectl", - "get", - "pods", - "-l", - "app=environmentd", - "-n", - "materialize-environment", - "-o", - "jsonpath={.items[*].metadata.name}", - ], - cwd=path, - ) - - balancerd_name = spawn.capture( - [ - "kubectl", - "get", - "pods", - "-l", - "app=balancerd", - "-n", - "materialize-environment", - "-o", - "jsonpath={.items[*].metadata.name}", - ], - cwd=path, - ) - # error: arguments in resource/name form must have a single resource and name - print(f"Got balancerd name: {balancerd_name}") - - environmentd_port_forward_process = subprocess.Popen( - [ - "kubectl", - "port-forward", - f"pod/{environmentd_name}", - "-n", - "materialize-environment", - "6877:6877", - "6878:6878", - ], - preexec_fn=os.setpgrp, - ) - balancerd_port_forward_process = subprocess.Popen( - [ - "kubectl", - "port-forward", - f"pod/{balancerd_name}", - "-n", - "materialize-environment", - "6875:6875", - "6876:6876", - ], - preexec_fn=os.setpgrp, - ) - time.sleep(10) - - with c.override( - Testdrive( - materialize_url="postgres://materialize@127.0.0.1:6875/materialize", - materialize_url_internal="postgres://mz_system:materialize@127.0.0.1:6877/materialize", - materialize_use_https=True, - no_consistency_checks=True, - network_mode="host", - volume_workdir="../testdrive:/workdir", - # For full testdrive support we'll need: - # kafka_url=... - # schema_registry_url=... - # aws_endpoint=... 
- ) - ): - c.up("testdrive", persistent=True) - c.testdrive( - dedent( - """ - > SELECT 1 - 1 - """ - ) - ) + with c.override(testdrive(no_reset=False)): + aws.connect(c) with psycopg.connect( "postgres://materialize@127.0.0.1:6875/materialize" @@ -504,33 +559,125 @@ def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None: c.run_testdrive_files(*args.files) finally: - if environmentd_port_forward_process: - os.killpg(os.getpgid(environmentd_port_forward_process.pid), signal.SIGTERM) - if balancerd_port_forward_process: - os.killpg(os.getpgid(balancerd_port_forward_process.pid), signal.SIGTERM) + aws.cleanup() if args.cleanup: - print("--- Cleaning up") - if materialize_environment: - run_ignore_error( - ["kubectl", "delete", "-f", "-"], - cwd=path, - stdin=yaml.dump(materialize_environment).encode(), + aws.destroy() + + +PATH_AWS_PERSISTENT = MZ_ROOT / "test" / "terraform" / "aws-persistent" +PREFIX_AWS_PERSISTENT = "aws-persistent" + + +def workflow_aws_persistent_setup( + c: Composition, parser: WorkflowArgumentParser +) -> None: + """Setup the AWS persistent Terraform and Helm Chart""" + parser.add_argument( + "--tag", + type=str, + help="Custom version tag to use", + ) + + args = parser.parse_args() + + tag = get_tag(args.tag) + aws = AWS(PATH_AWS_PERSISTENT) + try: + aws.setup(PREFIX_AWS_PERSISTENT, True, tag) + with c.override(testdrive(no_reset=True)): + aws.connect(c) + c.testdrive( + dedent( + """ + > CREATE SOURCE counter FROM LOAD GENERATOR COUNTER + > CREATE TABLE table (c INT) + > CREATE MATERIALIZED VIEW mv AS SELECT count(*) FROM table + """ ) - run_ignore_error( - [ - "kubectl", - "delete", - "materialize.materialize.cloud/12345678-1234-1234-1234-123456789012", - "-n" "materialize-environment", - ] ) - run_ignore_error( - ["kubectl", "delete", "namespace", "materialize-environment"] - ) - run_ignore_error( - ["helm", "uninstall", "materialize-operator"], - cwd=path, + finally: + aws.cleanup() + + +def workflow_aws_persistent_test( + c: Composition, parser: WorkflowArgumentParser +) -> None: + """Run a test workload against the AWS persistent setup""" + parser.add_argument( + "--tag", + type=str, + help="Custom version tag to use", + ) + + parser.add_argument("--runtime", default=600, type=int, help="Runtime in seconds") + + args = parser.parse_args() + + start_time = time.time() + + tag = get_tag(args.tag) + aws = AWS(PATH_AWS_PERSISTENT) + try: + aws.setup(PREFIX_AWS_PERSISTENT, False, tag) + with c.override(testdrive(no_reset=True)): + aws.connect(c) + + count = 1 + + c.testdrive( + dedent( + """ + > DELETE FROM table + """ + ) ) - run_ignore_error(["kubectl", "delete", "namespace", "materialize"]) - spawn.runv(["terraform", "destroy", "-auto-approve"], cwd=path) + + while time.time() - start_time < args.runtime: + c.testdrive( + dedent( + f""" + > SELECT 1 + 1 + + > INSERT INTO table VALUES ({count}) + + > SELECT count(*) FROM table + {count} + + > SELECT * FROM mv + {count} + + > DROP VIEW IF EXISTS temp + + > CREATE VIEW temp AS SELECT * FROM mv + + > SELECT * FROM temp + {count} + """ + ) + ) + + count += 1 + + with psycopg.connect( + "postgres://materialize@127.0.0.1:6875/materialize", autocommit=True + ) as conn: + with conn.cursor() as cur: + cur.execute("SELECT max(counter) FROM counter") + old_max = cur.fetchall()[0][0] + time.sleep(5) + with conn.cursor() as cur: + cur.execute("SELECT max(counter) FROM counter") + new_max = cur.fetchall()[0][0] + assert new_max > old_max, f"{new_max} should be greater than {old_max}" + finally: + 
aws.cleanup() + + +def workflow_aws_persistent_destroy( + c: Composition, parser: WorkflowArgumentParser +) -> None: + """Destroy the AWS persistent Terraform and Helm Chart""" + aws = AWS(PATH_AWS_PERSISTENT) + aws.destroy()