Azure support #24

Open
wants to merge 3 commits into master
Changes from 1 commit

Add initial basic Azure support
marmarama committed Jun 11, 2019
commit 5b2fbddd2071c55e741b8e3464d09bdf05888a32
72 changes: 59 additions & 13 deletions bin/terraform.sh
@@ -95,6 +95,34 @@ additional arguments:
EOF
};

# Storage helpers, dispatching on ${storage_provider}. Note for reviewers:
# nothing below calls these yet (the script composes ${storage_ls_cmd} and
# ${storage_cp_cmd} directly), and the azurerm branches are argument-less stubs.
function storage_ls() {
  case "${storage_provider}" in
    aws)
      aws s3 ls s3://${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name}
      ;;
    gcloud)
      gsutil ls gs://${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name}
      ;;
    azurerm)
      az storage blob list
      ;;
  esac
};

function storage_cp() {
  # FIXME: the aws and gcloud branches lack a destination argument, and the
  # azurerm branch lacks arguments entirely; see the sketch below.
  case "${storage_provider}" in
    aws)
      aws s3 cp s3://${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name}
      ;;
    gcloud)
      gsutil cp gs://${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name}
      ;;
    azurerm)
      az storage blob copy start
      ;;
  esac
};
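
As committed, the azurerm branches (and the cp destinations) still need wiring before these helpers can do useful work. Below is a minimal sketch of completed Azure branches, assuming the "${project}${region}tfstate" storage account and "${bucket,,}" container naming this commit uses for the backend config; the az flags are standard CLI options, but the argument wiring is an editor's assumption, not part of the commit:

# Sketch only (not part of this commit): completed azurerm dispatch,
# assuming the account/container naming conventions used elsewhere in this PR.
function storage_ls_azurerm() {
  az storage blob list \
    --account-name "${project}${region}tfstate" \
    --container-name "${bucket,,}" \
    --prefix "${project}/${account_id}/${region}/${environment}/" \
    --output table;
};

function storage_cp_azurerm() {
  # $1 = remote blob name, $2 = local destination file
  az storage blob download \
    --account-name "${project}${region}tfstate" \
    --container-name "${bucket,,}" \
    --name "$1" \
    --file "$2";
};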

##
# Test for GNU getopt
##
@@ -251,16 +279,24 @@ readonly region="${region_arg:-${AWS_DEFAULT_REGION}}";
[ -n "${storage_provider}" ] \
|| storage_provider="aws";

if [ "${storage_provider}" == "aws" ]; then
storage_cmd="aws s3";
if [ "${storage_provider}" == "aws" ]; then
storage_cp_cmd="aws s3 cp";
storage_ls_cmd="aws s3 ls";
verify_cmd="aws sts get-caller-identity --query Arn --output text";
account_cmd="aws sts get-caller-identity --query Account --output text";
storage_url="s3://"
elif [ "${storage_provider}" == "gcloud" ]; then
storage_cmd="gsutil";
storage_cp_cmd="gsutil cp";
storage_ls_cmd="gsutil ls";
verify_cmd="gcloud config list --format value(core.account)";
account_cmd="gcloud config list --format value(core.project)";
storage_url="gs://"
elif [ "${storage_provider}" == "azurerm" ]; then
storage_cp_cmd="az storage blob copy start" ;
storage_ls_cmd="az storage blob list";
verify_cmd="az ad signed-in-user show --query userPrincipalName --output tsv";
account_cmd="az account list --query [?isDefault==\`true\`].name --output tsv";
storage_url="azurerm://"
fi

# Bootstrapping is special
@@ -295,15 +331,15 @@ else
error_and_die "No Credentials Found. \"${verify_cmd}\" responded with '${verify}'";
fi;

# Query canonical Account ID
account_id="$(${account_cmd})";
if [ -n "${account_id}" ]; then
  echo -e "Account ID: ${account_id}";
else
  error_and_die "Couldn't determine Account ID. \"${account_cmd}\" provided no output";
fi;

# Validate bucket. Set default if undefined
if [ -n "${bucket_prefix}" ]; then
  readonly bucket="${bucket_prefix}-${account_id}-${region}"
  echo -e "Using bucket ${storage_url}${bucket}";
@@ -314,7 +350,7 @@ fi;

declare component_path;
if [ "${bootstrap}" == "true" ]; then
  component_path="${base_path}/bootstrap/${storage_provider}";
else
  component_path="${base_path}/components/${component}";
fi;
@@ -421,10 +457,10 @@ else
declare -a secrets=();
readonly secrets_file_name="secret.tfvars.enc";
readonly secrets_file_path="build/${secrets_file_name}";
${storage_ls_cmd} ${storage_url}${bucket}/${project}/${account_id}/${region}/${environment}/${secrets_file_name} >/dev/null 2>&1;
if [ $? -eq 0 ]; then
  mkdir -p build;
  ${storage_cp_cmd} ${storage_url}${bucket}/${project}/${account_id}/${region}/${environment}/${secrets_file_name} ${secrets_file_path} \
    || error_and_die "Secrets file is present, but inaccessible. Ensure you have permission to read ${storage_url}${bucket}/${project}/${account_id}/${region}/${environment}/${secrets_file_name}";
  if [ -f "${secrets_file_path}" ]; then
    # NB: decryption still shells out to AWS KMS regardless of ${storage_provider}
    secrets=($(aws kms decrypt --ciphertext-blob fileb://${secrets_file_path} --output text --query Plaintext | base64 --decode));
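
Worth flagging in review: as the comment above notes, the secrets path is still AWS-only even when state lives in Azure blobs. A hedged sketch of an Azure-flavoured equivalent, reading from Key Vault rather than KMS-encrypted blobs; the vault and secret names here are hypothetical placeholders, not part of the commit:

# Sketch only: fetch pre-provisioned secrets from Azure Key Vault.
# "${project}-tfscaffold" and "${environment}-secret-tfvars" are
# illustrative names, not a convention established by this PR.
secrets=($(az keyvault secret show \
  --vault-name "${project}-tfscaffold" \
  --name "${environment}-secret-tfvars" \
  --query value \
  --output tsv));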
@@ -454,9 +490,9 @@ else
# Use this feature only if you're sure it's the right pattern for your use case.
readonly dynamic_file_name="dynamic.tfvars";
readonly dynamic_file_path="build/${dynamic_file_name}";
${storage_ls_cmd} ${storage_url}${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name} >/dev/null 2>&1;
if [ $? -eq 0 ]; then
  ${storage_cp_cmd} ${storage_url}${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name} ${dynamic_file_path} \
    || error_and_die "Remote tfvars file is present, but inaccessible. Ensure you have permission to read ${storage_url}${bucket}/${project}/${account_id}/${region}/${environment}/${dynamic_file_name}";
fi;

@@ -581,6 +617,16 @@ if [ ${storage_provider} == "gcloud" ]; then
}
}";

elif [ "${storage_provider}" == "azurerm" ]; then
  readonly backend_config="terraform {
  backend \"azurerm\" {
    storage_account_name = \"${project}${region}tfstate\"
    container_name       = \"${bucket,,}\"
    key                  = \"${backend_key}\"
    resource_group_name  = \"${bucket,,}\"
  }
}";

else
  readonly backend_config="terraform {
  backend \"s3\" {
@@ -658,7 +704,7 @@ case "${action}" in
|| error_and_die "Terraform plan failed";

if [ -n "${build_id}" ]; then
${storage_cp_cmd} build/${plan_file_name} ${storage_url}${bucket}/${plan_file_remote_key} \
|| error_and_die "Plan file upload failed (${storage_url}${bucket}/${plan_file_remote_key})";
fi;

@@ -687,7 +733,7 @@ case "${action}" in
plan_file_name="${component_name}_${build_id}.tfplan";
plan_file_remote_key="${backend_prefix}/plans/${plan_file_name}";

${storage_cp_cmd} ${storage_url}${bucket}/${plan_file_remote_key} build/${plan_file_name} \
  || error_and_die "Plan file download failed (${storage_url}${bucket}/${plan_file_remote_key})";

apply_plan="build/${plan_file_name}";
4 files renamed without changes.
3 changes: 3 additions & 0 deletions bootstrap/azurerm/outputs.tf
@@ -0,0 +1,3 @@
output "bucket_name" {
value = "${azurerm_storage_container.container.id}"
}
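
A reviewer's note: in the azurerm provider of this era, the id attribute of azurerm_storage_container resolves to the container's resource URL rather than its bare name. If downstream tooling expects a plain container name from bucket_name, the name attribute may be the safer output; a hedged alternative:

# Sketch: output the container's name instead of its resource URL.
output "bucket_name" {
  value = "${azurerm_storage_container.container.name}"
}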
1 change: 1 addition & 0 deletions bootstrap/azurerm/provider_azure.tf
@@ -0,0 +1 @@
provider "azurerm" {}
60 changes: 60 additions & 0 deletions bootstrap/azurerm/storage_container.tf
@@ -0,0 +1,60 @@
/* resource "aws_s3_bucket" "bucket" {
bucket = "${var.bucket_name}"
acl = "private"

force_destroy = "false"

versioning {
enabled = "true"
}

lifecycle_rule {
prefix = "/"
enabled = "true"

noncurrent_version_transition {
days = "30"
storage_class = "STANDARD_IA"
}

noncurrent_version_transition {
days = "60"
storage_class = "GLACIER"
}

noncurrent_version_expiration {
days = "90"
}
}

# This does not use default tag map merging because bootstrapping is special
# You should use default tag map merging elsewhere
tags {
"Name" = "Terraform Scaffold State File Bucket for account ${var.aws_account_id} in region ${var.region}"
"Environment" = "${var.environment}"
"Project" = "${var.project}"
"Component" = "${var.component}"
"Account" = "${var.aws_account_id}"
}
}
*/

resource "azurerm_resource_group" "container" {
name = "${lower(var.bucket_name)}"
location = "${var.region}"
}

resource "azurerm_storage_account" "container" {
name = "${var.project}${var.region}tfstate"
resource_group_name = "${azurerm_resource_group.container.name}"
location = "${var.region}"
account_tier = "Standard"
account_replication_type = "LRS"
}

resource "azurerm_storage_container" "container" {
name = "${lower(var.bucket_name)}"
resource_group_name = "${azurerm_resource_group.container.name}"
storage_account_name = "${azurerm_storage_account.container.name}"
container_access_type = "private"
}
31 changes: 31 additions & 0 deletions bootstrap/azurerm/variables.tf
@@ -0,0 +1,31 @@
variable "project" {
type = "string"
description = "The name of the Project we are bootstrapping terraformscaffold for"
}

variable "account_id" {
type = "string"
description = "The Azure Subscription ID into which we are bootstrapping terraformscaffold"
}

variable "region" {
type = "string"
description = "The Azure Region into which we are bootstrapping terraformscaffold"
}

variable "environment" {
type = "string"
description = "The name of the environment for the bootstrapping process; which is always bootstrap"
default = "bootstrap"
}

variable "component" {
type = "string"
description = "The name of the component for the bootstrapping process; which is always bootstrap"
default = "bootstrap"
}

variable "bucket_name" {
type = "string"
description = "The name to use for the terraformscaffold bucket"
}
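
For orientation, a hypothetical tfvars file showing the shape of these inputs. Every value below is a placeholder; the account_id value in particular depends on whether account_cmd is changed to return the subscription ID rather than its name (see the note in bin/terraform.sh above):

# All values are illustrative placeholders.
project     = "myproj"
account_id  = "00000000-0000-0000-0000-000000000000"
region      = "westeurope"
environment = "bootstrap"
component   = "bootstrap"
bucket_name = "myproj-terraformscaffold-bootstrap"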