adding major 4.16.0.20.0 #175
name: 'DSF Single Account CLI - AWS'
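# Serialize runs: only one run in the dsf_single_account concurrency group executes at a time.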
concurrency:
  group: dsf_single_account
on:
  workflow_call:
    inputs:
      branch:
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID:
        required: true
      AWS_SECRET_ACCESS_KEY:
        required: true
      AWS_ACCESS_KEY_ID_STAGE:
        required: true
      AWS_SECRET_ACCESS_KEY_STAGE:
        required: true
      SLACK_WEBHOOK_URL:
        required: true
      JUMP_SERVER_KEY:
        required: true
      DAM_LICENSE:
        required: true
      DEPLOYMENT_TAGS:
        required: true
  workflow_dispatch:
    inputs:
      branch:
        required: true
        type: string
      delay_destroy:
        description: 'Delay the destroy step and subsequent steps to allow investigation'
        type: boolean
        default: false
        required: false
  push:
    branches:
      - 'dev'
    paths:
      - 'modules/aws/**'
      - '!modules/aws/db-with-agent/*'
      - '!modules/aws/poc-db-onboarder/*'
      - '!modules/aws/rds-mssql-db/*'
      - '!modules/aws/rds-mysql-db/*'
      - '!modules/aws/rds-postgres-db/*'
      - '!modules/aws/sonar-upgrader/**'
      - 'modules/null/**'
      - 'examples/aws/installation/dsf_single_account_deployment/*'
  pull_request:
    types:
      - 'opened'
      - 'reopened'
    branches:
      - 'dev'
    paths:
      - 'modules/aws/**'
      - '!modules/aws/db-with-agent/*'
      - '!modules/aws/poc-db-onboarder/*'
      - '!modules/aws/rds-mssql-db/*'
      - '!modules/aws/rds-mysql-db/*'
      - '!modules/aws/rds-postgres-db/*'
      - '!modules/aws/sonar-upgrader/*'
      - 'modules/null/**'
      - 'examples/aws/installation/dsf_single_account_deployment/*'
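# TF_CLI_ARGS, TF_INPUT and TF_VAR_* are read by the Terraform CLI itself; the AWS_* credentials
# are picked up by the AWS provider.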
env:
  TF_CLI_ARGS: "-no-color"
  TF_INPUT: 0
  TF_VAR_gw_count: 1
  EXAMPLE_DIR: ./examples/aws/installation/dsf_single_account_deployment
  AWS_REGION: ap-southeast-1
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  AWS_ACCESS_KEY_ID_STAGE: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
  AWS_SECRET_ACCESS_KEY_STAGE: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
  JUMP_SERVER_KEY: ${{ secrets.JUMP_SERVER_KEY }}
  AWS_KEY_PATH: "jump_server_key.cer"
  DESTROY_DELAY_SECONDS: 1800
  TF_VAR_additional_tags: ${{ secrets.DEPLOYMENT_TAGS }}
permissions:
  contents: read
jobs:
  terraform:
    name: 'DSF Single Account ${{ inputs.branch }}'
    runs-on: ubuntu-latest
    environment: test
    # Use the Bash shell regardless of whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}
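      # Rewrite the module sources in the examples to point at the local modules in this
      # checkout; sed.expr in the repository root holds the substitution expressions.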
      - name: Change the modules source to local
        run: |
          find ./examples/ -type f -exec sed -i -f sed.expr {} \;
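      # master and dev get dedicated Terraform workspaces; any other branch falls back to the
      # shared dsf_single_account workspace.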
      - name: Set Workspace
        run: |
          if [ "${{ inputs.branch }}" == "master" ] || [ "${{ inputs.branch }}" == "dev" ]; then
            echo "TF_WORKSPACE=dsf_single_account_${{ inputs.branch }}" >> $GITHUB_ENV
          else
            echo "TF_WORKSPACE=dsf_single_account" >> $GITHUB_ENV
          fi
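      # Remote state is kept in S3 with a DynamoDB table for state locking.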
      - name: Create terraform backend file
        run: |
          cat << EOF > $EXAMPLE_DIR/backend.tf
          terraform {
            backend "s3" {
              bucket         = "terraform-state-bucket-dsfkit-github-tests"
              key            = "states/terraform.tfstate"
              dynamodb_table = "terraform-state-lock"
              region         = "us-east-1"
            }
          }
          EOF
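      # The tfvars content comes verbatim from the
      # TFVAR_PARAMETERS_DSF_SINGLE_ACCOUNT_AUTOMATION_PRIVATE_SUBNETS_V1 GitHub Actions variable.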
      - name: Create tfvars File
        run: |
          cat << EOF > $EXAMPLE_DIR/terraform.tfvars
          ${{ vars.TFVAR_PARAMETERS_DSF_SINGLE_ACCOUNT_AUTOMATION_PRIVATE_SUBNETS_V1 }}
          EOF
      - name: Cat tfvars File
        run: cat $EXAMPLE_DIR/terraform.tfvars
      - name: Create License File
        env:
          MY_SECRET: ${{ secrets.DAM_LICENSE }}
        run: |
          echo "${{ secrets.DAM_LICENSE }}" | base64 -d > $EXAMPLE_DIR/license.mprv
          cat $EXAMPLE_DIR/license.mprv
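      # Write the stage-account credentials under a named AWS profile (STAGE_PROFILE_NAME),
      # presumably for resources that are deployed into that second account.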
      - name: Add Profile Credentials to ~/.aws/credentials
        run: |
          aws configure set aws_access_key_id ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }} --profile ${{ vars.STAGE_PROFILE_NAME }}
          aws configure set aws_secret_access_key ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }} --profile ${{ vars.STAGE_PROFILE_NAME }}
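      # Capture the runner's public IP so it can be temporarily allowed through the security
      # groups below, and revoked again at the end of the job.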
      - name: Get The Public IP
        run: echo curr_ip=$(curl -s https://ipinfo.io/ip) >> $GITHUB_ENV
      - name: Set IP in AWS Security Group
        env:
          AWS_REGION: ap-southeast-1
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
        run: |
          aws_sg=$(aws ec2 authorize-security-group-ingress --group-id ${{ vars.JUMP_SERVER_SG_ID }} --protocol tcp --port 22 --cidr $curr_ip/32)
          echo sg_id=$(echo $aws_sg | jq '.SecurityGroupRules[0].SecurityGroupRuleId') >> $GITHUB_ENV
          aws_sg_for_singapore=$(aws ec2 authorize-security-group-ingress --group-id ${{ vars.SINGAPORE_SG_ID }} --protocol tcp --port 0-65535 --cidr $curr_ip/32)
          echo sg_id_for_singapore=$(echo $aws_sg_for_singapore | jq '.SecurityGroupRules[0].SecurityGroupRuleId') >> $GITHUB_ENV
      - name: Set IP in AWS Security Group - Sydney
        env:
          AWS_REGION: ap-southeast-2
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
        run: |
          aws_sg_for_sydney=$(aws ec2 authorize-security-group-ingress --group-id ${{ vars.SYDNEY_SG_ID }} --protocol tcp --port 0-65535 --cidr $curr_ip/32)
          echo sg_id_for_sydney=$(echo $aws_sg_for_sydney | jq '.SecurityGroupRules[0].SecurityGroupRuleId') >> $GITHUB_ENV
      - name: Create Key File
        run: |
          echo "${{ secrets.JUMP_SERVER_KEY }}" > $EXAMPLE_DIR/$AWS_KEY_PATH
          sudo chmod 400 $EXAMPLE_DIR/$AWS_KEY_PATH
      # Install the Terraform CLI (pinned to the 1.7.x series) without the output wrapper
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_wrapper: false
          terraform_version: ~1.7.0
      - name: Setup jq
        uses: sergeysova/jq-action@v2
      # Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc.
      - name: Terraform Init
        run: terraform -chdir=$EXAMPLE_DIR init
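      # Destroy any leftovers from a previous run: the *.tf files are renamed away so the
      # destroy runs against an empty configuration, then restored afterwards.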
      - name: Cleaning environment
        run: |
          mv $EXAMPLE_DIR/main.tf{,_}
          mv $EXAMPLE_DIR/outputs.tf{,_}
          mv $EXAMPLE_DIR/dam.tf{,_}
          mv $EXAMPLE_DIR/dra.tf{,_}
          mv $EXAMPLE_DIR/sonar.tf{,_}
          mv $EXAMPLE_DIR/networking.tf{,_}
          terraform -chdir=$EXAMPLE_DIR destroy -auto-approve
          mv $EXAMPLE_DIR/main.tf{_,}
          mv $EXAMPLE_DIR/outputs.tf{_,}
          mv $EXAMPLE_DIR/dam.tf{_,}
          mv $EXAMPLE_DIR/dra.tf{_,}
          mv $EXAMPLE_DIR/sonar.tf{_,}
          mv $EXAMPLE_DIR/networking.tf{_,}
      - name: Terraform Validate
        run: terraform -chdir=$EXAMPLE_DIR validate
      # Generates an execution plan for Terraform
      - name: Terraform Plan
        run: terraform -chdir=$EXAMPLE_DIR plan
      - name: Terraform Apply
        run: terraform -chdir=$EXAMPLE_DIR apply -auto-approve
      - name: Terraform Output
        if: always()
        run: terraform -chdir=$EXAMPLE_DIR output -json
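      # Upload the SSH keys generated by the deployment; useful when investigating a failed
      # environment.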
      - name: Collect Artifacts
        id: collect-artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: collected-keys-${{ env.TF_WORKSPACE }}
          path: |
            ${{ env.EXAMPLE_DIR }}/ssh_keys
      - name: Check how the workflow was run
        id: check-trigger
        if: ${{ failure() }}
        run: |
          if [ "${{ github.event_name }}" == "schedule" ]; then
            echo "run-by=Automation" >> $GITHUB_OUTPUT
          else
            echo "run-by=${{ github.actor }}" >> $GITHUB_OUTPUT
          fi
      # This step allows time for investigation of the failed resources before destroying them
      - name: Conditional Delay
        if: ${{ failure() }}
        run: |
          echo "delay_destroy: ${{ inputs.delay_destroy }}"
          if [ "${{ inputs.delay_destroy }}" == "true" ]; then
            echo "Terraform workspace: $TF_WORKSPACE"
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ github.workflow }} ${{ env.TF_WORKSPACE }} automation Failed*\n You have ${{ env.DESTROY_DELAY_SECONDS }} seconds to investigate the environment before it is destroyed :alarm_clock:\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
            echo ""
            echo "Sleeping for $((DESTROY_DELAY_SECONDS / 60)) minutes before destroying the environment"
            sleep $DESTROY_DELAY_SECONDS
          fi
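      # The environment is always torn down, even when earlier steps failed (after the optional
      # delay above).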
      - name: Terraform Destroy
        if: always()
        run: terraform -chdir=$EXAMPLE_DIR destroy -auto-approve
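      # Revoke the temporary ingress rules that were opened for the runner's IP.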
      - name: Delete Security Group
        env:
          AWS_REGION: ap-southeast-1
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
        if: always()
        run: |
          aws ec2 revoke-security-group-ingress --group-id ${{ vars.JUMP_SERVER_SG_ID }} --security-group-rule-ids ${{ env.sg_id }}
          aws ec2 revoke-security-group-ingress --group-id ${{ vars.SINGAPORE_SG_ID }} --security-group-rule-ids ${{ env.sg_id_for_singapore }}
      - name: Delete Security Group - Sydney
        env:
          AWS_REGION: ap-southeast-2
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
        if: always()
        run: |
          aws ec2 revoke-security-group-ingress --group-id ${{ vars.SYDNEY_SG_ID }} --security-group-rule-ids ${{ env.sg_id_for_sydney }}
      # Send job failure to Slack
      - name: Send Slack When Failure
        if: ${{ failure() }}
        run: |
          if [ "${{ inputs.branch }}" == "master" ]; then
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*DSF Single Account Prod ${{ inputs.workspace }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#dsfkit-prod"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          else
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*DSF Single Account Dev nightly ${{ inputs.workspace }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          fi