diff --git a/.circleci/config.yml b/.circleci/config.yml index be1841a9..0bb5646e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -42,7 +42,7 @@ jobs: paths: - $HOME/.go_workspace/src/github.com/hashicorp/terraform-aws-vault/test/vendor - run: mkdir -p /tmp/logs - - run: run-go-tests --path test | tee /tmp/logs/all.log + - run: run-go-tests --path test --timeout 90m | tee /tmp/logs/all.log - run: command: terratest_log_parser --testlog /tmp/logs/all.log --outputdir /tmp/logs when: always diff --git a/examples/vault-dynamodb-backend/README.md b/examples/vault-dynamodb-backend/README.md new file mode 100644 index 00000000..2249ed2b --- /dev/null +++ b/examples/vault-dynamodb-backend/README.md @@ -0,0 +1,45 @@ +# Vault Cluster with DynamoDB backend example + +This folder shows an example of Terraform code to deploy a [Vault](https://www.vaultproject.io/) cluster in +[AWS](https://aws.amazon.com/) using the [vault-cluster module](https://github.com/hashicorp/terraform-aws-vault/tree/master/modules/vault-cluster). +The Vault cluster uses [DynamoDB](https://aws.amazon.com/dynamodb/) as a high-availability storage backend, so this +example also deploys a separate DynamoDB table. + +This example creates a Vault cluster spread across the subnets in the default VPC of the AWS account. For an example of a Vault cluster +that is publicly accessible, see [the root example](https://github.com/hashicorp/terraform-aws-vault/tree/master/examples/root-example). + +![Vault architecture](https://github.com/hashicorp/terraform-aws-vault/blob/master/_docs/architecture-with-dynamodb.png?raw=true) + +You will need to create an [Amazon Machine Image (AMI)](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AMIs.html) +that has Vault installed, which you can do using the [vault-consul-ami example](https://github.com/hashicorp/terraform-aws-vault/tree/master/examples/vault-consul-ami). 
+ +For more info on how the Vault cluster works, check out the [vault-cluster](https://github.com/hashicorp/terraform-aws-vault/tree/master/modules/vault-cluster) documentation. + +**Note**: To keep this example as simple to deploy and test as possible, it deploys the Vault cluster into your default +VPC and default subnets, some of which might be publicly accessible. This is OK for learning and experimenting, but for +production usage, we strongly recommend deploying the Vault cluster into the private subnets of a custom VPC. + + + + +## Quick start + +To deploy a Vault Cluster: + +1. `git clone` this repo to your computer. +1. Optional: build a Vault and Consul AMI. See the [vault-consul-ami + example](https://github.com/hashicorp/terraform-aws-vault/tree/master/examples/vault-consul-ami) documentation for + instructions. Make sure to note down the ID of the AMI. +1. Install [Terraform](https://www.terraform.io/). +1. Open `variables.tf`, set the environment variables specified at the top of the file, and fill in any other variables that + don't have a default. If you built a custom AMI, put the AMI ID into the `ami_id` variable. Otherwise, one of our + public example AMIs will be used by default. These AMIs are great for learning/experimenting, but are NOT + recommended for production use. +1. Run `terraform init`. +1. Run `terraform apply`. +1. Run the [vault-examples-helper.sh script](https://github.com/hashicorp/terraform-aws-vault/tree/master/examples/vault-examples-helper/vault-examples-helper.sh) to + print out the IP addresses of the Vault servers and some example commands you can run to interact with the cluster: + `../vault-examples-helper/vault-examples-helper.sh`. + +To see how to connect to the Vault cluster, initialize it, and start reading and writing secrets, head over to the +[How do you use the Vault cluster?](https://github.com/hashicorp/terraform-aws-vault/tree/master/modules/vault-cluster#how-do-you-use-the-vault-cluster) docs. 
diff --git a/examples/vault-dynamodb-backend/dynamodb/main.tf b/examples/vault-dynamodb-backend/dynamodb/main.tf new file mode 100644 index 00000000..7405fba8 --- /dev/null +++ b/examples/vault-dynamodb-backend/dynamodb/main.tf @@ -0,0 +1,17 @@ +resource "aws_dynamodb_table" "vault_dynamo" { + name = var.table_name + hash_key = "Path" + range_key = "Key" + read_capacity = var.read_capacity + write_capacity = var.write_capacity + + attribute { + name = "Path" + type = "S" + } + + attribute { + name = "Key" + type = "S" + } +} diff --git a/examples/vault-dynamodb-backend/dynamodb/variables.tf b/examples/vault-dynamodb-backend/dynamodb/variables.tf new file mode 100644 index 00000000..c48d524c --- /dev/null +++ b/examples/vault-dynamodb-backend/dynamodb/variables.tf @@ -0,0 +1,18 @@ +# --------------------------------------------------------------------------------------------------------------------- +# REQUIRED PARAMETERS +# You must provide a value for each of these parameters. +# --------------------------------------------------------------------------------------------------------------------- + +variable "table_name" { + description = "The name of the Dynamo Table to create and use as a storage backend." +} + +variable "read_capacity" { + description = "Sets the DynamoDB read capacity for storage backend" + default = 5 +} + +variable "write_capacity" { + description = "Sets the DynamoDB write capacity for storage backend" + default = 5 +} diff --git a/examples/vault-dynamodb-backend/main.tf b/examples/vault-dynamodb-backend/main.tf new file mode 100644 index 00000000..1452cad7 --- /dev/null +++ b/examples/vault-dynamodb-backend/main.tf @@ -0,0 +1,76 @@ +# ---------------------------------------------------------------------------------------------------------------------- +# REQUIRE A SPECIFIC TERRAFORM VERSION OR HIGHER +# This module has been updated with 0.12 syntax, which means it is no longer compatible with any versions below 0.12. 
+# ---------------------------------------------------------------------------------------------------------------------- +terraform { + required_version = ">= 0.12" +} + +# --------------------------------------------------------------------------------------------------------------------- +# DEPLOY THE VAULT SERVER CLUSTER +# --------------------------------------------------------------------------------------------------------------------- + +module "backend" { + source = "./dynamodb" + table_name = var.dynamo_table_name + read_capacity = var.dynamo_read_capacity + write_capacity = var.dynamo_write_capacity +} + +module "vault_cluster" { + # When using these modules in your own templates, you will need to use a Git URL with a ref attribute that pins you + # to a specific version of the modules, such as the following example: + # source = "github.com/hashicorp/terraform-aws-vault.git//modules/vault-cluster?ref=v0.0.1" + source = "../../modules/vault-cluster" + + cluster_name = var.vault_cluster_name + cluster_size = var.vault_cluster_size + instance_type = var.vault_instance_type + + ami_id = var.ami_id + user_data = data.template_file.user_data_vault_cluster.rendered + + vpc_id = data.aws_vpc.default.id + subnet_ids = data.aws_subnet_ids.default.ids + + # To make testing easier, we allow requests from any IP address here but in a production deployment, we *strongly* + # recommend you limit this to the IP address ranges of known, trusted servers inside your VPC. 
+ + allowed_ssh_cidr_blocks = ["0.0.0.0/0"] + allowed_inbound_cidr_blocks = ["0.0.0.0/0"] + allowed_inbound_security_group_ids = [] + allowed_inbound_security_group_count = 0 + ssh_key_name = var.ssh_key_name + + enable_dynamo_backend = true + dynamo_table_name = var.dynamo_table_name +} + +data "template_file" "user_data_vault_cluster" { + template = file("${path.module}/user-data-vault.sh") + + vars = { + aws_region = data.aws_region.current.name + dynamo_table_name = var.dynamo_table_name + } +} + +# --------------------------------------------------------------------------------------------------------------------- +# DEPLOY THE CLUSTERS IN THE DEFAULT VPC AND AVAILABILITY ZONES +# Using the default VPC and subnets makes this example easy to run and test, but it means Vault is +# accessible from the public Internet. In a production deployment, we strongly recommend deploying into a custom VPC +# and private subnets. +# --------------------------------------------------------------------------------------------------------------------- + +data "aws_vpc" "default" { + default = var.vpc_id == null ? 
true : false + id = var.vpc_id +} + +data "aws_subnet_ids" "default" { + vpc_id = data.aws_vpc.default.id +} + +data "aws_region" "current" { +} + diff --git a/examples/vault-dynamodb-backend/outputs.tf b/examples/vault-dynamodb-backend/outputs.tf new file mode 100644 index 00000000..f57334a2 --- /dev/null +++ b/examples/vault-dynamodb-backend/outputs.tf @@ -0,0 +1,39 @@ +output "asg_name_vault_cluster" { + value = module.vault_cluster.asg_name +} + +output "launch_config_name_vault_cluster" { + value = module.vault_cluster.launch_config_name +} + +output "iam_role_arn_vault_cluster" { + value = module.vault_cluster.iam_role_arn +} + +output "iam_role_id_vault_cluster" { + value = module.vault_cluster.iam_role_id +} + +output "security_group_id_vault_cluster" { + value = module.vault_cluster.security_group_id +} + +output "aws_region" { + value = data.aws_region.current.name +} + +output "vault_servers_cluster_tag_key" { + value = module.vault_cluster.cluster_tag_key +} + +output "vault_servers_cluster_tag_value" { + value = module.vault_cluster.cluster_tag_value +} + +output "ssh_key_name" { + value = var.ssh_key_name +} + +output "vault_cluster_size" { + value = var.vault_cluster_size +} diff --git a/examples/vault-dynamodb-backend/user-data-vault.sh b/examples/vault-dynamodb-backend/user-data-vault.sh new file mode 100644 index 00000000..6ff712c8 --- /dev/null +++ b/examples/vault-dynamodb-backend/user-data-vault.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# This script is meant to be run in the User Data of each EC2 Instance while it's booting. The script uses the +# run-vault script to configure and start Vault in server mode. +# Note that this script assumes it's running in an AMI built from the Packer template in +# examples/vault-consul-ami/vault-consul.json. 
+ +set -e + +# Send the log output from this script to user-data.log, syslog, and the console +# From: https://alestic.com/2010/12/ec2-user-data-output/ +exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1 + +# The Packer template puts the TLS certs in these file paths +readonly VAULT_TLS_CERT_FILE="/opt/vault/tls/vault.crt.pem" +readonly VAULT_TLS_KEY_FILE="/opt/vault/tls/vault.key.pem" + +# The variables below are filled in via Terraform interpolation +/opt/vault/bin/run-vault \ + --enable-dynamo-backend \ + --dynamo-table "${dynamo_table_name}" \ + --dynamo-region "${aws_region}" \ + --tls-cert-file "$VAULT_TLS_CERT_FILE" \ + --tls-key-file "$VAULT_TLS_KEY_FILE" \ No newline at end of file diff --git a/examples/vault-dynamodb-backend/variables.tf b/examples/vault-dynamodb-backend/variables.tf new file mode 100644 index 00000000..928f9b9e --- /dev/null +++ b/examples/vault-dynamodb-backend/variables.tf @@ -0,0 +1,67 @@ +# --------------------------------------------------------------------------------------------------------------------- +# ENVIRONMENT VARIABLES +# Define these secrets as environment variables +# --------------------------------------------------------------------------------------------------------------------- + +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_DEFAULT_REGION + +# --------------------------------------------------------------------------------------------------------------------- +# REQUIRED PARAMETERS +# You must provide a value for each of these parameters. +# --------------------------------------------------------------------------------------------------------------------- + +variable "ami_id" { + description = "The ID of the AMI to run in the cluster. This should be an AMI built from the Packer template under examples/vault-consul-ami/vault-consul.json." 
+ type = string +} + +variable "ssh_key_name" { + description = "The name of an EC2 Key Pair that can be used to SSH to the EC2 Instances in this cluster. Set to an empty string to not associate a Key Pair." + type = string +} + +# --------------------------------------------------------------------------------------------------------------------- +# OPTIONAL PARAMETERS +# These parameters have reasonable defaults. +# --------------------------------------------------------------------------------------------------------------------- + +variable "vault_cluster_name" { + description = "What to name the Vault server cluster and all of its associated resources" + type = string + default = "vault-dynamo-example" +} + +variable "vault_cluster_size" { + description = "The number of Vault server nodes to deploy. We strongly recommend using 3 or 5." + type = number + default = 3 +} + +variable "vault_instance_type" { + description = "The type of EC2 Instance to run in the Vault ASG" + type = string + default = "t2.micro" +} + +variable "vpc_id" { + description = "The ID of the VPC to deploy into. Leave an empty string to use the Default VPC in this region." + type = string + default = null +} + +variable "dynamo_table_name" { + description = "The name of the Dynamo Table to create and use as a storage backend. Only used if 'enable_dynamo_backend' is set to true." + default = "my-vault-table" +} + +variable "dynamo_read_capacity" { + description = "Sets the DynamoDB read capacity for storage backend" + default = 5 +} + +variable "dynamo_write_capacity" { + description = "Sets the DynamoDB write capacity for storage backend" + default = 5 +} diff --git a/modules/run-vault/run-vault b/modules/run-vault/run-vault index 192feb7a..211ad972 100755 --- a/modules/run-vault/run-vault +++ b/modules/run-vault/run-vault @@ -43,6 +43,9 @@ function print_usage { echo -e " --s3-bucket\tSpecifies the S3 bucket to use to store Vault data. Only used if '--enable-s3-backend' is set." 
echo -e " --s3-bucket-path\tSpecifies the S3 bucket path to use to store Vault data. Only used if '--enable-s3-backend' is set." echo -e " --s3-bucket-region\tSpecifies the AWS region where '--s3-bucket' lives. Only used if '--enable-s3-backend' is set." + echo -e " --enable-dynamo-backend\tIf this flag is set, DynamoDB will be enabled as the backend storage (HA)" + echo -e " --dynamo-region\tSpecifies the AWS region where --dynamo-table lives. Only used if '--enable-dynamo-backend is on'" + echo -e " --dynamo--table\tSpecifies the DynamoDB table to use for HA Storage. Only used if '--enable-dynamo-backend is on'" echo echo "Options for Vault Agent:" echo @@ -233,10 +236,13 @@ function generate_vault_config { local -r s3_bucket="$9" local -r s3_bucket_path="${10}" local -r s3_bucket_region="${11}" - local -r enable_auto_unseal="${12}" - local -r auto_unseal_kms_key_id="${13}" - local -r auto_unseal_kms_key_region="${14}" - local -r auto_unseal_endpoint="${15}" + local -r enable_dynamo_backend="${12}" + local -r dynamo_region="${13}" + local -r dynamo_table="${14}" + local -r enable_auto_unseal="${15}" + local -r auto_unseal_kms_key_id="${16}" + local -r auto_unseal_kms_key_region="${17}" + local -r auto_unseal_endpoint="${18}" local -r config_path="$config_dir/$VAULT_CONFIG_FILE" local instance_ip_address @@ -278,7 +284,9 @@ EOF ) local consul_storage_type="storage" + local dynamodb_storage_type="storage" local s3_config="" + local vault_storage_backend="" if [[ "$enable_s3_backend" == "true" ]]; then s3_config=$(cat <> "$config_path" echo -e "$listener_config" >> "$config_path" echo -e "$s3_config" >> "$config_path" - echo -e "$consul_storage" >> "$config_path" + echo -e "$vault_storage_backend" >> "$config_path" chown "$user:$user" "$config_path" } @@ -425,6 +447,9 @@ function run { local s3_bucket="" local s3_bucket_path="" local s3_bucket_region="" + local enable_dynamo_backend="false" + local dynamo_region="" + local dynamo_table="" local agent="false" local 
agent_vault_address="$DEFAULT_AGENT_VAULT_ADDRESS" local agent_vault_port="$DEFAULT_PORT" @@ -515,6 +540,17 @@ function run { s3_bucket_region="$2" shift ;; + --enable-dynamo-backend) + enable_dynamo_backend="true" + ;; + --dynamo-region) + dynamo_region="$2" + shift + ;; + --dynamo-table) + dynamo_table="$2" + shift + ;; --agent) agent="true" ;; @@ -598,6 +634,11 @@ function run { assert_not_empty "--s3-bucket-region" "$s3_bucket_region" fi fi + + if [[ "$enable_dynamo_backend" == "true" ]]; then + assert_not_empty "--dynamo-table" "$dynamo_table" + assert_not_empty "--dynamo-region" "$dynamo_region" + fi assert_is_installed "systemctl" assert_is_installed "aws" @@ -666,6 +707,9 @@ function run { "$s3_bucket" \ "$s3_bucket_path" \ "$s3_bucket_region" \ + "$enable_dynamo_backend" \ + "$dynamo_region" \ + "$dynamo_table" \ "$enable_auto_unseal" \ "$auto_unseal_kms_key_id" \ "$auto_unseal_kms_key_region" \ diff --git a/modules/vault-cluster/main.tf b/modules/vault-cluster/main.tf index d8e6b486..6838267c 100644 --- a/modules/vault-cluster/main.tf +++ b/modules/vault-cluster/main.tf @@ -6,6 +6,10 @@ terraform { required_version = ">= 0.12" } +data "aws_region" "current" {} + +data "aws_caller_identity" "current" {} + # --------------------------------------------------------------------------------------------------------------------- # CREATE AN AUTO SCALING GROUP (ASG) TO RUN VAULT # --------------------------------------------------------------------------------------------------------------------- @@ -52,6 +56,17 @@ resource "aws_autoscaling_group" "autoscaling_group" { propagate_at_launch = true } + + # Use table policy name in tags for depending on them when they are there + # And only create the cluster after dynamo exists + # Otherwise Vault might boot and not find the bucket or not yet have the necessary permissions + # Not using `depends_on` because these resources might not exist + tag { + key = "using_dynamodb_backend" + value = 
element(concat(aws_iam_role_policy.vault_dynamo.*.name, [""]), 0) + propagate_at_launch = true + } + tag { key = "using_auto_unseal" value = element(concat(aws_iam_role_policy.vault_auto_unseal_kms.*.name, [""]), 0) @@ -294,6 +309,49 @@ data "aws_iam_policy_document" "vault_s3" { } } +data "aws_iam_policy_document" "vault_dynamo" { + count = var.enable_dynamo_backend ? 1 : 0 + statement { + effect = "Allow" + actions = [ + "dynamodb:DescribeLimits", + "dynamodb:DescribeTimeToLive", + "dynamodb:ListTagsOfResource", + "dynamodb:DescribeReservedCapacityOfferings", + "dynamodb:DescribeReservedCapacity", + "dynamodb:ListTables", + "dynamodb:BatchGetItem", + "dynamodb:BatchWriteItem", + "dynamodb:CreateTable", + "dynamodb:DeleteItem", + "dynamodb:GetItem", + "dynamodb:GetRecords", + "dynamodb:PutItem", + "dynamodb:Query", + "dynamodb:UpdateItem", + "dynamodb:Scan", + "dynamodb:DescribeTable" + ] + resources = [ + format("arn:aws:dynamodb:%s:%s:table/%s", + var.dynamo_table_region == "" ? data.aws_region.current.name : var.dynamo_table_region, + data.aws_caller_identity.current.account_id, + var.dynamo_table_name + ) + ] + } +} + +resource "aws_iam_role_policy" "vault_dynamo" { + count = var.enable_dynamo_backend ? 1 : 0 + name = "vault_dynamo" + role = aws_iam_role.instance_role.id + policy = element( + concat(data.aws_iam_policy_document.vault_dynamo.*.json, [""]), + 0, + ) +} + data "aws_iam_policy_document" "vault_auto_unseal_kms" { count = var.enable_auto_unseal ? 
1 : 0 diff --git a/modules/vault-cluster/variables.tf b/modules/vault-cluster/variables.tf index 4067580f..1349a6c5 100644 --- a/modules/vault-cluster/variables.tf +++ b/modules/vault-cluster/variables.tf @@ -219,3 +219,20 @@ variable "enabled_metrics" { default = [] } +variable "enable_dynamo_backend" { + description = "Whether to use a DynamoDB storage backend instead of Consul" + type = bool + default = false +} + +variable "dynamo_table_name" { + description = "Table name for the storage backend, required if `enable_dynamo_backend = true`" + type = string + default = "" +} + +variable "dynamo_table_region" { + description = "Table region used for the instance policy. Uses the current region if not supplied. Global tables should use `*` to allow for a cross region deployment to write to their respective table" + type = string + default = "" +} \ No newline at end of file diff --git a/test/vault_cluster_auth_test.go b/test/vault_cluster_auth_test.go index 6dc38ca9..4b86c87d 100644 --- a/test/vault_cluster_auth_test.go +++ b/test/vault_cluster_auth_test.go @@ -10,11 +10,11 @@ import ( "github.com/gruntwork-io/terratest/modules/aws" "github.com/gruntwork-io/terratest/modules/files" - "github.com/gruntwork-io/terratest/modules/http-helper" + http_helper "github.com/gruntwork-io/terratest/modules/http-helper" "github.com/gruntwork-io/terratest/modules/logger" "github.com/gruntwork-io/terratest/modules/random" "github.com/gruntwork-io/terratest/modules/terraform" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" ) const VAULT_EC2_AUTH_PATH = "examples/vault-ec2-auth" @@ -55,6 +55,8 @@ func runVaultEC2AuthTest(t *testing.T, amiId string, awsRegion string, sshUserNa terraformVars := map[string]interface{}{ VAR_VAULT_AUTH_SERVER_NAME: fmt.Sprintf("vault-auth-test-%s", uniqueId), VAR_VAULT_SECRET_NAME: exampleSecret, + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + 
VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), } deployCluster(t, amiId, awsRegion, examplesDir, uniqueId, terraformVars) }) @@ -93,6 +95,8 @@ func runVaultIAMAuthTest(t *testing.T, amiId string, awsRegion string, sshUserNa VAR_VAULT_AUTH_SERVER_NAME: fmt.Sprintf("vault-auth-test-%s", uniqueId), VAR_VAULT_IAM_AUTH_ROLE: fmt.Sprintf("vault-auth-role-test-%s", uniqueId), VAR_VAULT_SECRET_NAME: exampleSecret, + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), } deployCluster(t, amiId, awsRegion, examplesDir, uniqueId, terraformVars) }) @@ -131,6 +135,8 @@ func runVaultAgentTest(t *testing.T, amiId string, awsRegion string, sshUserName VAR_VAULT_AUTH_SERVER_NAME: fmt.Sprintf("vault-auth-test-%s", uniqueId), VAR_VAULT_IAM_AUTH_ROLE: fmt.Sprintf("vault-auth-role-test-%s", uniqueId), VAR_VAULT_SECRET_NAME: exampleSecret, + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), } deployCluster(t, amiId, awsRegion, examplesDir, uniqueId, terraformVars) }) diff --git a/test/vault_cluster_autounseal_test.go b/test/vault_cluster_autounseal_test.go index 6378645b..c6a32ad5 100644 --- a/test/vault_cluster_autounseal_test.go +++ b/test/vault_cluster_autounseal_test.go @@ -11,7 +11,7 @@ import ( "github.com/gruntwork-io/terratest/modules/retry" "github.com/gruntwork-io/terratest/modules/ssh" "github.com/gruntwork-io/terratest/modules/terraform" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" ) // This is the alias of a KMS key we have previously created that lives in the @@ -52,6 +52,8 @@ func runVaultAutoUnsealTest(t *testing.T, amiId string, awsRegion string, sshUse terraformVars := map[string]interface{}{ VAR_VAULT_AUTO_UNSEAL_KMS_KEY_ALIAS: AUTO_UNSEAL_KMS_KEY_ALIAS, VAR_VAULT_CLUSTER_SIZE: 1, + 
VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), } deployCluster(t, amiId, awsRegion, examplesDir, uniqueId, terraformVars) }) diff --git a/test/vault_cluster_dynamodb_backend_test.go b/test/vault_cluster_dynamodb_backend_test.go new file mode 100644 index 00000000..c2914c1b --- /dev/null +++ b/test/vault_cluster_dynamodb_backend_test.go @@ -0,0 +1,51 @@ +package test + +import ( + "fmt" + "testing" + + "github.com/gruntwork-io/terratest/modules/random" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" +) + +const VAULT_CLUSTER_DYNAMODB_BACKEND_PATH = "examples/vault-dynamodb-backend" + +const VAR_DYNAMO_TABLE_NAME = "dynamo_table_name" + +// Test the Vault with DynamoDB Backend example by: +// +// 1. Copy the code in this repo to a temp folder so tests on the Terraform code can run in parallel without the +// state files overwriting each other. +// 2. Build the AMI in the vault-consul-ami example with the given build name +// 3. Deploy that AMI using the example Terraform code +// 4. SSH to a Vault node and initialize the Vault cluster +// 5. SSH to each Vault node and unseal it +// 6. 
Connect to the Vault cluster via the ELB +func runVaultWithDynamoBackendClusterTest(t *testing.T, amiId string, awsRegion, sshUserName string) { + examplesDir := test_structure.CopyTerraformFolderToTemp(t, REPO_ROOT, VAULT_CLUSTER_DYNAMODB_BACKEND_PATH) + + defer test_structure.RunTestStage(t, "teardown", func() { + teardownResources(t, examplesDir) + }) + + defer test_structure.RunTestStage(t, "log", func() { + terraformOptions := test_structure.LoadTerraformOptions(t, examplesDir) + keyPair := test_structure.LoadEc2KeyPair(t, examplesDir) + + getVaultLogs(t, "vaultClusterWithDynamoBackend", terraformOptions, amiId, awsRegion, sshUserName, keyPair) + }) + + test_structure.RunTestStage(t, "deploy", func() { + terraformVars := map[string]interface{}{ + VAR_DYNAMO_TABLE_NAME: fmt.Sprintf("vault-dynamo-test-%s", random.UniqueId()), + } + deployCluster(t, amiId, awsRegion, examplesDir, random.UniqueId(), terraformVars) + }) + + test_structure.RunTestStage(t, "validate", func() { + terraformOptions := test_structure.LoadTerraformOptions(t, examplesDir) + keyPair := test_structure.LoadEc2KeyPair(t, examplesDir) + + initializeAndUnsealVaultCluster(t, OUTPUT_VAULT_CLUSTER_ASG_NAME, sshUserName, terraformOptions, awsRegion, keyPair) + }) +} diff --git a/test/vault_cluster_enterprise_test.go b/test/vault_cluster_enterprise_test.go index 4b2ca281..4e4aad4f 100644 --- a/test/vault_cluster_enterprise_test.go +++ b/test/vault_cluster_enterprise_test.go @@ -12,7 +12,7 @@ import ( "github.com/gruntwork-io/terratest/modules/retry" "github.com/gruntwork-io/terratest/modules/ssh" "github.com/gruntwork-io/terratest/modules/terraform" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" ) // To test this on circle ci you need a url set as an environment variable, VAULT_AMI_TEMPLATE_VAR_DOWNLOAD_URL @@ -51,7 +51,12 @@ func runVaultEnterpriseClusterTest(t *testing.T, amiId string, awsRegion string, }) 
test_structure.RunTestStage(t, "deploy", func() { - deployCluster(t, amiId, awsRegion, examplesDir, random.UniqueId(), nil) + uniqueId := random.UniqueId() + terraformVars := map[string]interface{}{ + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), + } + deployCluster(t, amiId, awsRegion, examplesDir, uniqueId, terraformVars) }) test_structure.RunTestStage(t, "validate", func() { diff --git a/test/vault_cluster_private_test.go b/test/vault_cluster_private_test.go index 9b4c9c7e..f115b336 100644 --- a/test/vault_cluster_private_test.go +++ b/test/vault_cluster_private_test.go @@ -1,10 +1,11 @@ package test import ( + "fmt" "testing" "github.com/gruntwork-io/terratest/modules/random" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" ) const VAULT_CLUSTER_PRIVATE_PATH = "examples/vault-cluster-private" @@ -33,7 +34,12 @@ func runVaultPrivateClusterTest(t *testing.T, amiId string, awsRegion string, ss }) test_structure.RunTestStage(t, "deploy", func() { - deployCluster(t, amiId, awsRegion, examplesDir, random.UniqueId(), nil) + uniqueId := random.UniqueId() + terraformVars := map[string]interface{}{ + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), + } + deployCluster(t, amiId, awsRegion, examplesDir, random.UniqueId(), terraformVars) }) test_structure.RunTestStage(t, "validate", func() { diff --git a/test/vault_cluster_public_test.go b/test/vault_cluster_public_test.go index adeceaf1..54f9497b 100644 --- a/test/vault_cluster_public_test.go +++ b/test/vault_cluster_public_test.go @@ -1,10 +1,11 @@ package test import ( + "fmt" "testing" "github.com/gruntwork-io/terratest/modules/random" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure 
"github.com/gruntwork-io/terratest/modules/test-structure" ) const VAULT_CLUSTER_PUBLIC_PATH = REPO_ROOT @@ -37,10 +38,13 @@ func runVaultPublicClusterTest(t *testing.T, amiId string, awsRegion string, ssh }) test_structure.RunTestStage(t, "deploy", func() { + uniqueId := random.UniqueId() terraformVars := map[string]interface{}{ VAULT_CLUSTER_PUBLIC_VAR_CREATE_DNS_ENTRY: boolToTerraformVar(false), VAULT_CLUSTER_PUBLIC_VAR_HOSTED_ZONE_DOMAIN_NAME: "", VAULT_CLUSTER_PUBLIC_VAR_VAULT_DOMAIN_NAME: "", + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), } deployCluster(t, amiId, awsRegion, examplesDir, random.UniqueId(), terraformVars) }) diff --git a/test/vault_cluster_s3_backend_test.go b/test/vault_cluster_s3_backend_test.go index cb028cf8..4d9405cc 100644 --- a/test/vault_cluster_s3_backend_test.go +++ b/test/vault_cluster_s3_backend_test.go @@ -1,10 +1,11 @@ package test import ( + "fmt" "testing" "github.com/gruntwork-io/terratest/modules/random" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" ) const VAULT_CLUSTER_S3_BACKEND_PATH = "examples/vault-s3-backend" @@ -40,6 +41,8 @@ func runVaultWithS3BackendClusterTest(t *testing.T, amiId string, awsRegion, ssh terraformVars := map[string]interface{}{ VAR_S3_BUCKET_NAME: s3BucketName(uniqueId), VAR_FORCE_DESTROY_S3_BUCKET: true, + VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), + VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), } deployCluster(t, amiId, awsRegion, examplesDir, uniqueId, terraformVars) }) diff --git a/test/vault_helpers.go b/test/vault_helpers.go index 68cf6261..cc11523a 100644 --- a/test/vault_helpers.go +++ b/test/vault_helpers.go @@ -18,7 +18,7 @@ import ( "github.com/gruntwork-io/terratest/modules/retry" "github.com/gruntwork-io/terratest/modules/ssh" 
"github.com/gruntwork-io/terratest/modules/terraform" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" "github.com/hashicorp/vault/api" "github.com/stretchr/testify/require" ) @@ -102,11 +102,9 @@ func deployCluster(t *testing.T, amiId string, awsRegion string, examplesDir str terraformOptions := &terraform.Options{ TerraformDir: examplesDir, Vars: mergeMaps(terraformVars, map[string]interface{}{ - VAR_AMI_ID: amiId, - VAR_VAULT_CLUSTER_NAME: fmt.Sprintf("vault-test-%s", uniqueId), - VAR_CONSUL_CLUSTER_NAME: fmt.Sprintf("consul-test-%s", uniqueId), - VAR_CONSUL_CLUSTER_TAG_KEY: fmt.Sprintf("consul-test-%s", uniqueId), - VAR_SSH_KEY_NAME: keyPair.Name, + VAR_AMI_ID: amiId, + VAR_VAULT_CLUSTER_NAME: fmt.Sprintf("vault-test-%s", uniqueId), + VAR_SSH_KEY_NAME: keyPair.Name, }), EnvVars: map[string]string{ ENV_VAR_AWS_REGION: awsRegion, diff --git a/test/vault_main_test.go b/test/vault_main_test.go index 905a37d2..9a924f09 100644 --- a/test/vault_main_test.go +++ b/test/vault_main_test.go @@ -6,7 +6,7 @@ import ( "github.com/gruntwork-io/terratest/modules/aws" "github.com/gruntwork-io/terratest/modules/packer" - "github.com/gruntwork-io/terratest/modules/test-structure" + test_structure "github.com/gruntwork-io/terratest/modules/test-structure" ) const AMI_EXAMPLE_PATH = "../examples/vault-consul-ami/vault-consul.json" @@ -59,6 +59,11 @@ var testCases = []testCase{ runVaultWithS3BackendClusterTest, false, }, + { + "TestVaultWithDynamoDBBackend", + runVaultWithDynamoBackendClusterTest, + false, + }, { "TestVaultPrivateCluster", runVaultPrivateClusterTest,