From 725e7ad62686ea4be06d4b6c8a3b6532bf9cd1e4 Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 19 Dec 2023 16:17:34 +0100 Subject: [PATCH 01/51] Fix integration test issue --- ...deploy_flask_app-update-arguments-spec.yml | 6 ++ playbooks/webapp/migrate_webapp.yaml | 1 + playbooks/webapp/webapp.yaml | 1 + playbooks/webapp/webapp_ha_aurora.yaml | 2 + roles/deploy_flask_app/README.md | 4 +- .../deploy_flask_app/meta/argument_specs.yml | 9 +- roles/deploy_flask_app/tasks/setup.yaml | 12 +-- .../targets/test_deploy_flask_app/aliases | 6 +- .../test_deploy_flask_app/handlers/main.yml | 6 ++ .../test_deploy_flask_app/tasks/create.yaml | 85 ++++++------------- .../test_deploy_flask_app/tasks/main.yaml | 6 +- 11 files changed, 55 insertions(+), 83 deletions(-) create mode 100644 changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml create mode 100644 tests/integration/targets/test_deploy_flask_app/handlers/main.yml diff --git a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml new file mode 100644 index 00000000..b3e0bc34 --- /dev/null +++ b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml @@ -0,0 +1,6 @@ +--- +breaking_changes: + - >- + roles/deploy_flask_app - Remove parameter ``deploy_flask_app_sshkey_pair_name`` that was previously required to create + ssh connection to the bastion host in favor of the new parameter ``deploy_flask_app_bastion_ssh_private_key`` defining + the path to the ssh private key file to use instead (). 
diff --git a/playbooks/webapp/migrate_webapp.yaml b/playbooks/webapp/migrate_webapp.yaml index 83886ad1..4aad09f8 100644 --- a/playbooks/webapp/migrate_webapp.yaml +++ b/playbooks/webapp/migrate_webapp.yaml @@ -58,6 +58,7 @@ ansible.builtin.import_role: name: cloud.aws_ops.deploy_flask_app vars: + deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index 0f169154..d27348f5 100644 --- a/playbooks/webapp/webapp.yaml +++ b/playbooks/webapp/webapp.yaml @@ -26,6 +26,7 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: + deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" diff --git a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index a16a1ccd..fb5c6170 100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -57,6 +57,7 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: + deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" deploy_flask_app_private_subnet_id: "{{ primary_private_subnet.subnets[0].id }}" deploy_flask_app_vpc_id: "{{ primary_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ primary_vm_result }}" @@ -96,6 +97,7 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: + deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" deploy_flask_app_private_subnet_id: "{{ replica_private_subnet.subnets[0].id }}" deploy_flask_app_vpc_id: "{{ replica_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ replica_vm_result }}" diff --git a/roles/deploy_flask_app/README.md b/roles/deploy_flask_app/README.md index 
690013b9..d69edb33 100644 --- a/roles/deploy_flask_app/README.md +++ b/roles/deploy_flask_app/README.md @@ -24,20 +24,20 @@ Role Variables ## variables to create new hosts and groups in inventory of in memory playbook. * **deploy_flask_app_region** (str): Region where the app is to be deployed. -* **deploy_flask_app_bastion_host_username** (str): Username for the bastion host SSH user. * **deploy_flask_app_private_subnet_id** (str): Private subnet id of the bastion host * **deploy_flask_app_vpc_id** (str): vpc id for the host. * **deploy_flask_app_rds_info** (dict): A dict of information for the backend RDS. This dict has the output of amazon.aws.rds_instance_info mode. * **deploy_flask_app_rds_master_username** (str): Username for the RDS instance. * **deploy_flask_app_rds_master_password** (str): password for the RDS instance. * **deploy_flask_app_vm_info** (dict): A dict of information for the vm to use. This dict has the output of amazon.aws.ec2_instance_info module. -* **deploy_flask_app_sshkey_pair_name** (str): Name for the EC2 key pair. ## variables needed for the deployment # Bastion host * **deploy_flask_app_bastion_host_name** (str): Name for the EC2 instance. +* **deploy_flask_app_bastion_host_username** (str): Username for the bastion host SSH user. * **deploy_flask_app_bastion_host_required_packages** (list): Packages to be installed on the bastion host. +* **deploy_flask_app_bastion_ssh_private_key** (path): The path to the ssh private key file to use to connect to the bastion host. * **deploy_flask_app_number_of_workers** (int): Number of instances to create. * **deploy_flask_app_workers_instance_type** (str): RC2 instance type for workers. * **deploy_flask_app_workers_user_name** (str): Username for the workers. 
diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index 6c63ba4f..bb10c342 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -16,6 +16,11 @@ argument_specs: description: Name for the EC2 instance. type: str required: True + deploy_flask_app_bastion_ssh_private_key: + description: The path to ssh private key file to use to connect to the bastion host. + type: path + required: True + version_added: 2.1.0 deploy_flask_app_bastion_host_required_packages: description: Packages to be installed on the bastion host. type: list @@ -29,10 +34,6 @@ argument_specs: description: vpc id for the host. type: str required: True - deploy_flask_app_sshkey_pair_name: - description: Name for the EC2 key pair. - type: str - required: True deploy_flask_app_rds_info: description: A dict of information for the backend RDS. This dict has the output of amazon.aws.rds_instance_info module. type: dict diff --git a/roles/deploy_flask_app/tasks/setup.yaml b/roles/deploy_flask_app/tasks/setup.yaml index 55b68b47..7fc72188 100644 --- a/roles/deploy_flask_app/tasks/setup.yaml +++ b/roles/deploy_flask_app/tasks/setup.yaml @@ -5,16 +5,12 @@ - name: Create resources playbook block: - - name: Set 'sshkey_file' variable - ansible.builtin.set_fact: - deploy_flask_app_sshkey_file: ~/private-key-{{ deploy_flask_app_sshkey_pair_name }}-{{ deploy_flask_app_region | default(aws_region) }} - - name: Add host to inventory ansible.builtin.add_host: hostname: bastion ansible_ssh_user: "{{ deploy_flask_app_bastion_host_username }}" ansible_host: "{{ deploy_flask_app_vm_info.instances.0.public_ip_address }}" - ansible_ssh_common_args: -o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_sshkey_file }} + ansible_ssh_common_args: -o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_bastion_ssh_private_key }} 
ansible_python_interpreter: auto ansible_host_name: "{{ deploy_flask_app_vm_info.instances.0.public_dns_name | split('.') | first }}" host_config: @@ -26,7 +22,7 @@ vpc_id: "{{ deploy_flask_app_vpc_id }}" rds_info: host: "{{ deploy_flask_app_rds_info.instances.0.endpoint.address }}" - name: "{{ deploy_flask_app_rds_info.instances.0.db_name | default('mysampledb123') }}" - master_user_password: "{{ deploy_flask_app_rds_master_password | default('L#5cH2mgy_') }}" - master_username: "{{ deploy_flask_app_rds_master_username | default('ansible') }}" + name: "{{ deploy_flask_app_rds_info.instances.0.db_name }}" + master_user_password: "{{ deploy_flask_app_rds_master_password }}" + master_username: "{{ deploy_flask_app_rds_master_username }}" register: deploy_flask_app_setup diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index 931f237c..66d29cec 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,7 +1,3 @@ cloud/aws role/deploy_flask_app -time=35m - -# Integration tests are broken -# fatal: [testhost -> bastion]: UNREACHABLE! 
-disabled \ No newline at end of file +time=35m \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/handlers/main.yml b/tests/integration/targets/test_deploy_flask_app/handlers/main.yml new file mode 100644 index 00000000..2e32ab7d --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/handlers/main.yml @@ -0,0 +1,6 @@ +--- +- name: Delete temporary key pair directory + ansible.builtin.file: + state: absent + path: "{{ test_deploy_flask_app__tmpdir.path }}" + when: test_deploy_flask_app__tmpdir is defined diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index 87ed7431..90ffe0cb 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -70,7 +70,7 @@ state: present register: internet_gw - - name: Create NAT gateway (allow access to internet for instances in private subnet) + - name: Create NAT gateway attached to the public subnet (allow access to internet for instances in private subnet) amazon.aws.ec2_vpc_nat_gateway: subnet_id: "{{ subnet.subnet.id }}" if_exist_do_not_create: true @@ -141,65 +141,33 @@ state: present register: rds_sg - - name: Get RDS instance info - amazon.aws.rds_instance_info: - db_instance_identifier: "{{ rds_identifier }}" - register: rds_result - - - name: Create RDS instance - when: rds_result.instances | length == 0 - block: - - name: Create RDS instance (PostGreSQL Database) - amazon.aws.rds_instance: - force_update_password: true - wait: true - allocated_storage: "{{ rds_allocated_storage_gb }}" - backup_retention_period: 0 - db_instance_class: "{{ rds_instance_class }}" - db_instance_identifier: "{{ rds_identifier }}" - db_name: "{{ rds_instance_name }}" - engine: "{{ rds_engine }}" - engine_version: "{{ rds_engine_version }}" - master_user_password: "{{ deploy_flask_app_rds_master_password }}" - 
master_username: "{{ deploy_flask_app_rds_master_username }}" - monitoring_interval: 0 - storage_type: standard - skip_final_snapshot: true - db_subnet_group_name: "{{ rds_subnet_group_name }}" - vpc_security_group_ids: - - "{{ rds_sg.group_id }}" - when: rds_snapshot_arn is not defined - - - name: Create RDS instance from snapshot (PostGreSQL Database) - amazon.aws.rds_instance: - force_update_password: true - wait: true - allocated_storage: "{{ rds_allocated_storage_gb }}" - backup_retention_period: 0 - db_instance_class: "{{ rds_instance_class }}" - db_instance_identifier: "{{ rds_identifier }}" - engine: "{{ rds_engine }}" - engine_version: "{{ rds_engine_version }}" - master_user_password: "{{ deploy_flask_app_rds_master_password }}" - master_username: "{{ deploy_flask_app_rds_master_user }}" - monitoring_interval: 0 - storage_type: standard - skip_final_snapshot: true - db_subnet_group_name: "{{ rds_subnet_group_name }}" - vpc_security_group_ids: - - "{{ rds_sg.group_id }}" - creation_source: snapshot - db_snapshot_identifier: "{{ rds_snapshot_arn }}" - when: rds_snapshot_arn is defined - - - name: Get RDS instance info - amazon.aws.rds_instance_info: + - name: Create RDS instance (PostGreSQL Database) + amazon.aws.rds_instance: + force_update_password: true + wait: true + allocated_storage: "{{ rds_allocated_storage_gb }}" + backup_retention_period: 0 + db_instance_class: "{{ rds_instance_class }}" db_instance_identifier: "{{ rds_identifier }}" + db_name: "{{ rds_instance_name }}" + engine: "{{ rds_engine }}" + engine_version: "{{ rds_engine_version }}" + master_user_password: "{{ deploy_flask_app_rds_master_password }}" + master_username: "{{ deploy_flask_app_rds_master_username }}" + monitoring_interval: 0 + storage_type: standard + skip_final_snapshot: true + db_subnet_group_name: "{{ rds_subnet_group_name }}" + vpc_security_group_ids: + - "{{ rds_sg.group_id }}" register: rds_result - - name: Set 'sshkey_file' variable - ansible.builtin.set_fact: - 
sshkey_file: ~/private-key-{{ deploy_flask_app_sshkey_pair_name }}-{{ region | default(aws_region) }} + - name: Create temporary directory to save private key in + ansible.builtin.tempfile: + suffix: .key + state: directory + register: test_deploy_flask_app__tmpdir + notify: 'Delete temporary key pair directory' - name: Create key pair to connect to the VM amazon.aws.ec2_key: @@ -209,9 +177,8 @@ - name: Save private key into file ansible.builtin.copy: content: "{{ rsa_key.key.private_key }}" - dest: "{{ sshkey_file }}" + dest: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" mode: 0400 - when: rsa_key is changed - name: Create a virtual machine amazon.aws.ec2_instance: diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index 0b013d3d..cb841a8b 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -8,11 +8,6 @@ region: "{{ aws_region }}" block: - - name: Fail when 'resource_prefix' is not defined - ansible.builtin.fail: - msg: resource prefix should be defined as resource_prefix - when: resource_prefix is not defined - - name: Run operation create ansible.builtin.include_tasks: "create.yaml" @@ -24,6 +19,7 @@ deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" deploy_flask_app_rds_info: "{{ rds_result }}" + deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" - name: Check that a page returns successfully ansible.builtin.uri: From 35bc4746bad9255b20624e710446e5ec6ee142bf Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 19 Dec 2023 16:23:47 +0100 Subject: [PATCH 02/51] adding issue number --- .../20231219-deploy_flask_app-update-arguments-spec.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml 
b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml index b3e0bc34..ad6a2390 100644 --- a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml +++ b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml @@ -3,4 +3,4 @@ breaking_changes: - >- roles/deploy_flask_app - Remove parameter ``deploy_flask_app_sshkey_pair_name`` that was previously required to create ssh connection to the bastion host in favor of the new parameter ``deploy_flask_app_bastion_ssh_private_key`` defining - the path to the ssh private key file to use instead (). + the path to the ssh private key file to use instead (https://github.com/redhat-cop/cloud.aws_ops/issues/103). From d15c275de19ebbf6025fde265b50dcd1866863bd Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 19 Dec 2023 17:00:13 +0100 Subject: [PATCH 03/51] Retrieve RDS instance info --- .../targets/test_deploy_flask_app/tasks/create.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index 90ffe0cb..a0aed839 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -160,6 +160,10 @@ db_subnet_group_name: "{{ rds_subnet_group_name }}" vpc_security_group_ids: - "{{ rds_sg.group_id }}" + + - name: Get RDS instance info + amazon.aws.rds_instance_info: + db_instance_identifier: "{{ rds_identifier }}" register: rds_result - name: Create temporary directory to save private key in From 83bdcedf7117c6394951450b4927b8119c99b5b6 Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 19 Dec 2023 18:21:12 +0100 Subject: [PATCH 04/51] minor updates --- ...1219-deploy_flask_app-update-arguments-spec.yml | 5 ++--- roles/deploy_flask_app/README.md | 1 + roles/deploy_flask_app/defaults/main.yml | 2 ++ roles/deploy_flask_app/meta/argument_specs.yml | 4 ++++ 
roles/deploy_flask_app/tasks/bastion_setup.yaml | 14 ++++---------- roles/deploy_flask_app/tasks/deploy_app.yaml | 8 +------- roles/deploy_flask_app/templates/inventory.j2 | 2 +- .../test_deploy_flask_app/tasks/create.yaml | 1 + 8 files changed, 16 insertions(+), 21 deletions(-) create mode 100644 roles/deploy_flask_app/defaults/main.yml diff --git a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml index ad6a2390..f968915e 100644 --- a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml +++ b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml @@ -1,6 +1,5 @@ --- breaking_changes: - >- - roles/deploy_flask_app - Remove parameter ``deploy_flask_app_sshkey_pair_name`` that was previously required to create - ssh connection to the bastion host in favor of the new parameter ``deploy_flask_app_bastion_ssh_private_key`` defining - the path to the ssh private key file to use instead (https://github.com/redhat-cop/cloud.aws_ops/issues/103). + roles/deploy_flask_app - Add parameter ``deploy_flask_app_bastion_ssh_private_key`` to define + the path to the ssh private key file to use to connect to the bastion host (https://github.com/redhat-cop/cloud.aws_ops/issues/103). diff --git a/roles/deploy_flask_app/README.md b/roles/deploy_flask_app/README.md index d69edb33..7105c128 100644 --- a/roles/deploy_flask_app/README.md +++ b/roles/deploy_flask_app/README.md @@ -37,6 +37,7 @@ Role Variables * **deploy_flask_app_bastion_host_name** (str): Name for the EC2 instance. * **deploy_flask_app_bastion_host_username** (str): Username for the bastion host SSH user. * **deploy_flask_app_bastion_host_required_packages** (list): Packages to be installed on the bastion host. +* **deploy_flask_app_sshkey_pair_name** (str): Name for the EC2 key pair. 
* **deploy_flask_app_bastion_ssh_private_key** (path): The path to the ssh private key file to use to connect to the bastion host. * **deploy_flask_app_number_of_workers** (int): Number of instances to create. * **deploy_flask_app_workers_instance_type** (str): RC2 instance type for workers. diff --git a/roles/deploy_flask_app/defaults/main.yml b/roles/deploy_flask_app/defaults/main.yml new file mode 100644 index 00000000..f57b9c25 --- /dev/null +++ b/roles/deploy_flask_app/defaults/main.yml @@ -0,0 +1,2 @@ +--- +deploy_flask_app_workers_ssh_private_key: "/tmp/id_rsa" diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index bb10c342..008f629e 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -34,6 +34,10 @@ argument_specs: description: vpc id for the host. type: str required: True + deploy_flask_app_sshkey_pair_name: + description: Name for the EC2 key pair. + type: str + required: True deploy_flask_app_rds_info: description: A dict of information for the backend RDS. This dict has the output of amazon.aws.rds_instance_info module. 
type: dict diff --git a/roles/deploy_flask_app/tasks/bastion_setup.yaml b/roles/deploy_flask_app/tasks/bastion_setup.yaml index f13e550c..cc394e7f 100644 --- a/roles/deploy_flask_app/tasks/bastion_setup.yaml +++ b/roles/deploy_flask_app/tasks/bastion_setup.yaml @@ -26,13 +26,7 @@ state: present become: true - - name: Generate ssh key for existing user - ansible.builtin.user: - name: "{{ deploy_flask_app_bastion_host_username }}" - state: present - generate_ssh_key: true - - - name: Get content of public key - ansible.builtin.slurp: - src: ~/.ssh/id_rsa.pub - register: deploy_flask_app_sshkey + - name: Copy remote ssh private key file into bastion + ansible.builtin.copy: + src: "{{ deploy_flask_app_bastion_ssh_private_key }}" + dest: "{{ deploy_flask_app_workers_ssh_private_key }}" diff --git a/roles/deploy_flask_app/tasks/deploy_app.yaml b/roles/deploy_flask_app/tasks/deploy_app.yaml index e8691be6..5d964e3d 100644 --- a/roles/deploy_flask_app/tasks/deploy_app.yaml +++ b/roles/deploy_flask_app/tasks/deploy_app.yaml @@ -10,14 +10,8 @@ block: - name: Set variables ansible.builtin.set_fact: - deploy_flask_app_localhost_key_pair: "{{ deploy_flask_app_setup.add_host.host_vars.ansible_host_name }}-key" deploy_flask_app_instance_name: "{{ deploy_flask_app_setup.add_host.host_vars.ansible_host_name }}-workers" - - name: Create key pair to connect to the VM - amazon.aws.ec2_key: - name: "{{ deploy_flask_app_localhost_key_pair }}" - key_material: "{{ deploy_flask_app_sshkey.content | b64decode }}" - - name: List running instances amazon.aws.ec2_instance_info: filters: @@ -34,7 +28,7 @@ name: "{{ deploy_flask_app_instance_name }}" instance_type: "{{ deploy_flask_app_workers_instance_type }}" image_id: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.image_id }}" - key_name: "{{ deploy_flask_app_localhost_key_pair }}" + key_name: "{{ deploy_flask_app_sshkey_pair_name }}" subnet_id: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_subnet_id }}" 
network: assign_public_ip: false diff --git a/roles/deploy_flask_app/templates/inventory.j2 b/roles/deploy_flask_app/templates/inventory.j2 index d5fb7eba..80f633bb 100644 --- a/roles/deploy_flask_app/templates/inventory.j2 +++ b/roles/deploy_flask_app/templates/inventory.j2 @@ -1,4 +1,4 @@ [all] {% for item in deploy_flask_app_workers_instances %} -{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no' ansible_host="{{ item.private_ip_address }}" +{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_workers_ssh_private_key }}' ansible_host="{{ item.private_ip_address }}" {% endfor %} diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index a0aed839..c1c6557f 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -183,6 +183,7 @@ content: "{{ rsa_key.key.private_key }}" dest: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" mode: 0400 + when: rsa_key is changed - name: Create a virtual machine amazon.aws.ec2_instance: From 7719bb8e2b939f2c7de6cd787e95e867773e7f60 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 09:27:03 +0100 Subject: [PATCH 05/51] setting file permission --- roles/deploy_flask_app/tasks/bastion_setup.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/deploy_flask_app/tasks/bastion_setup.yaml b/roles/deploy_flask_app/tasks/bastion_setup.yaml index cc394e7f..f9e97f0f 100644 --- a/roles/deploy_flask_app/tasks/bastion_setup.yaml +++ b/roles/deploy_flask_app/tasks/bastion_setup.yaml 
@@ -30,3 +30,4 @@ ansible.builtin.copy: src: "{{ deploy_flask_app_bastion_ssh_private_key }}" dest: "{{ deploy_flask_app_workers_ssh_private_key }}" + mode: 0400 From 30228f8196dd7abfc226f06c5b976b04078af588 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 13:56:14 +0100 Subject: [PATCH 06/51] Fix issue with delete.yaml --- .../test_deploy_flask_app/tasks/delete.yaml | 117 ++++++++---------- 1 file changed, 54 insertions(+), 63 deletions(-) diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml index ae621d91..c0233c7b 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml @@ -15,52 +15,39 @@ ansible.builtin.set_fact: vpc_id: "{{ vpc.vpcs.0.vpc_id }}" - - name: Get bastion instance info + # Delete EC2 instances + - name: Get EC2 instance info amazon.aws.ec2_instance_info: filters: - instance-type: "{{ bastion_host_type }}" - key-name: "{{ deploy_flask_app_sshkey_pair_name }}" vpc-id: "{{ vpc_id }}" - register: bastion - - - name: Delete EC2 instances with dependant Resources - when: bastion.instances | length == 1 - block: - - name: Set 'instance_host_name' variable - ansible.builtin.set_fact: - instance_host_name: "{{ bastion.instances.0.public_dns_name | split('.') | first }}" - - - name: Delete workers key pair - amazon.aws.ec2_key: - name: "{{ instance_host_name }}-key" - state: absent - - - name: Delete load balancer - amazon.aws.elb_classic_lb: - name: "{{ instance_host_name }}-lb" - wait: true - state: absent - - - name: List workers - amazon.aws.ec2_instance_info: - filters: - tag:Name: "{{ instance_host_name }}-workers" - instance-state-name: running - register: running - - - name: Delete workers - amazon.aws.ec2_instance: - instance_ids: "{{ running.instances | map(attribute='instance_id') | list }}" - wait: true - state: terminated - - - name: Delete 
bastion host - amazon.aws.ec2_instance: - instance_ids: - - "{{ bastion.instances.0.instance_id }}" - wait: true - state: terminated + register: ec2_instances + - name: Delete ec2 instances from VPC + amazon.aws.ec2_instance: + instance_ids: "{{ ec2_instances.instances | map(attribute='instance_id') | list }}" + wait: true + state: terminated + when: ec2_instances.instances | length > 0 + + # Delete Load balancer + - name: List Load balancer(s) from VPC + community.aws.elb_classic_lb_info: + register: load_balancers + + - name: Delete load balancer(s) + amazon.aws.elb_classic_lb: + name: "{{ item }}" + wait: true + state: absent + with_items: "{{ load_balancers.elbs | selectattr('vpc_id', 'equalto', vpc_id) | map(attribute='load_balancer_name') | list }}" + + # Delete EC2 key pair + - name: Delete EC2 key pair + amazon.aws.ec2_key: + name: "{{ deploy_flask_app_sshkey_pair_name }}" + state: absent + + # Delete RDS information - name: Delete RDS instance amazon.aws.rds_instance: state: absent @@ -68,29 +55,12 @@ skip_final_snapshot: true wait: true - - name: Delete key pair to connect to the bastion VM - amazon.aws.ec2_key: - name: "{{ deploy_flask_app_sshkey_pair_name }}" - state: absent - - name: Delete RDS subnet group amazon.aws.rds_subnet_group: name: "{{ rds_subnet_group_name }}" state: absent - - name: List Security group from VPC - amazon.aws.ec2_security_group_info: - filters: - vpc-id: "{{ vpc_id }}" - tag:prefix: "{{ resource_prefix }}" - register: secgroups - - - name: Delete security groups - amazon.aws.ec2_security_group: - state: absent - group_id: "{{ item }}" - with_items: "{{ secgroups.security_groups | map(attribute='group_id') | list }}" - + # Delete VPC route table - name: List routes table from VPC amazon.aws.ec2_vpc_route_table_info: filters: @@ -106,6 +76,7 @@ state: absent with_items: "{{ route_table.route_tables | map(attribute='id') | list }}" + # Delete VPC route table - name: Get NAT gateway amazon.aws.ec2_vpc_nat_gateway_info: filters: 
@@ -119,20 +90,40 @@ wait: true with_items: "{{ nat_gw.result | map(attribute='nat_gateway_id') | list }}" + # Delete Internet gateway - name: Delete internet gateway amazon.aws.ec2_vpc_igw: vpc_id: "{{ vpc_id }}" state: absent + # Delete Subnets + - name: List Subnets from VPC + amazon.aws.ec2_vpc_subnet_info: + filters: + vpc-id: "{{ vpc_id }}" + register: vpc_subnets + - name: Delete subnets amazon.aws.ec2_vpc_subnet: cidr: "{{ item }}" state: absent vpc_id: "{{ vpc_id }}" - with_items: "{{ subnet_cidr }}" + with_items: "{{ vpc_subnets.subnets | map(attribute='cidr_block') | list }}" + + # Delete Security groups + - name: List Security group from VPC + amazon.aws.ec2_security_group_info: + filters: + vpc-id: "{{ vpc_id }}" + register: secgroups + + - name: Delete security groups + amazon.aws.ec2_security_group: + state: absent + group_id: "{{ item }}" + with_items: "{{ secgroups.security_groups | rejectattr('group_name', 'equalto', 'default') | map(attribute='group_id') | list }}" - # As ec2_vpc_route_table can't delete route table, the vpc still has dependencies and cannot be deleted. - # You need to do it delete it manually using either the console or the cli. 
+ # Delete VPC - name: Delete VPC amazon.aws.ec2_vpc_net: name: "{{ vpc_name }}" From 9d11e47fd9be90608254b7cd63f9798ddadfae70 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 15:25:26 +0100 Subject: [PATCH 07/51] try to ping the workers --- .../deploy_flask_app/tasks/bastion_setup.yaml | 33 -- roles/deploy_flask_app/tasks/deploy_app.yaml | 341 +++++++----------- roles/deploy_flask_app/tasks/main.yaml | 28 +- roles/deploy_flask_app/tasks/setup.yaml | 28 -- roles/deploy_flask_app/tasks/setup_infra.yaml | 86 +++++ roles/deploy_flask_app/templates/inventory.j2 | 4 - roles/deploy_flask_app/templates/vars.yaml.j2 | 8 +- .../targets/test_deploy_flask_app/aliases | 2 +- .../test_deploy_flask_app/defaults/main.yml | 2 + .../test_deploy_flask_app/tasks/create.yaml | 96 ++--- .../test_deploy_flask_app/tasks/main.yaml | 20 +- .../test_deploy_flask_app/vars/main.yaml | 12 +- 12 files changed, 303 insertions(+), 357 deletions(-) delete mode 100644 roles/deploy_flask_app/tasks/bastion_setup.yaml delete mode 100644 roles/deploy_flask_app/tasks/setup.yaml create mode 100644 roles/deploy_flask_app/tasks/setup_infra.yaml delete mode 100644 roles/deploy_flask_app/templates/inventory.j2 diff --git a/roles/deploy_flask_app/tasks/bastion_setup.yaml b/roles/deploy_flask_app/tasks/bastion_setup.yaml deleted file mode 100644 index f9e97f0f..00000000 --- a/roles/deploy_flask_app/tasks/bastion_setup.yaml +++ /dev/null @@ -1,33 +0,0 @@ ---- -- name: Deploy resource from Bastion - delegate_to: bastion - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key | default(omit) }}" - aws_secret_key: "{{ aws_secret_key | default(omit) }}" - security_token: "{{ security_token | default(omit) }}" - region: "{{ deploy_flask_app_region | default(aws_region) }}" - block: - - name: Update ssh_config - ansible.builtin.lineinfile: - path: /etc/ssh/sshd_config - regex: "{{ item.regex }}" - line: "{{ item.line }}" - loop: - - regex: ^(# *)?ClientAliveInterval - line: 
ClientAliveInterval 1200 - - regex: ^(# *)?ClientAliveCountMax - line: ClientAliveCountMax 3 - become: true - - - name: Install required packages - ansible.builtin.yum: - name: "{{ deploy_flask_app_bastion_host_required_packages }}" - state: present - become: true - - - name: Copy remote ssh private key file into bastion - ansible.builtin.copy: - src: "{{ deploy_flask_app_bastion_ssh_private_key }}" - dest: "{{ deploy_flask_app_workers_ssh_private_key }}" - mode: 0400 diff --git a/roles/deploy_flask_app/tasks/deploy_app.yaml b/roles/deploy_flask_app/tasks/deploy_app.yaml index 5d964e3d..53f16141 100644 --- a/roles/deploy_flask_app/tasks/deploy_app.yaml +++ b/roles/deploy_flask_app/tasks/deploy_app.yaml @@ -1,219 +1,128 @@ --- -- name: Create Cloud Resources (workers, load balancer, etc) - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key | default(omit) }}" - aws_secret_key: "{{ aws_secret_key | default(omit) }}" - security_token: "{{ security_token | default(omit) }}" - region: "{{ deploy_flask_app_region | default(aws_region) }}" - - block: - - name: Set variables - ansible.builtin.set_fact: - deploy_flask_app_instance_name: "{{ deploy_flask_app_setup.add_host.host_vars.ansible_host_name }}-workers" - - - name: List running instances - amazon.aws.ec2_instance_info: - filters: - tag:Name: "{{ deploy_flask_app_instance_name }}" - instance-state-name: running - register: deploy_flask_app_vms - - - name: Compute number of instances to create/delete - ansible.builtin.set_fact: - deploy_flask_app_expected_instances: "{{ deploy_flask_app_number_of_workers | int - deploy_flask_app_vms.instances | length }}" - - - name: Create list of targets hosts - amazon.aws.ec2_instance: - name: "{{ deploy_flask_app_instance_name }}" - instance_type: "{{ deploy_flask_app_workers_instance_type }}" - image_id: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.image_id }}" - key_name: "{{ deploy_flask_app_sshkey_pair_name }}" - subnet_id: "{{ 
deploy_flask_app_setup.add_host.host_vars.host_config.private_subnet_id }}" - network: - assign_public_ip: false - delete_on_termination: true - groups: - - "{{ deploy_flask_app_setup.add_host.host_vars.host_config.group_id }}" - security_groups: - - "{{ deploy_flask_app_setup.add_host.host_vars.host_config.group_id }}" - wait: true - count: "{{ deploy_flask_app_expected_instances }}" - state: started - register: deploy_flask_app_workers - when: deploy_flask_app_expected_instances | int > 0 - - - name: List running instances (once again) - amazon.aws.ec2_instance_info: - filters: - tag:Name: "{{ deploy_flask_app_instance_name }}" - instance-state-name: running - register: deploy_flask_app_vms - - - name: Create list of instances (join) - ansible.builtin.set_fact: - deploy_flask_app_instances_list: [] - - name: Update join_instances - ansible.builtin.set_fact: - deploy_flask_app_instances_list: "{{ deploy_flask_app_instances_list + [item.instance_id + ':' + item.private_ip_address] }}" - with_items: "{{ deploy_flask_app_vms.instances }}" - -- name: Set variables - ansible.builtin.set_fact: - deploy_flask_app_workers_instances: "{{ deploy_flask_app_vms.instances }}" - deploy_flask_app_workers_join: "{{ deploy_flask_app_instances_list | join(',') }}" - -- name: Create inventory file - ansible.builtin.template: - src: inventory.j2 - dest: ~/inventory.ini - mode: 0644 - delegate_to: bastion - -- name: Create vars file +- name: Generate configuration (inventory, vars) from templates ansible.builtin.template: - src: vars.yaml.j2 - dest: ~/vars.yaml - mode: 0644 - delegate_to: bastion - -- name: Create private registry and store webapp container image - delegate_to: bastion - block: - - name: Clone git repository for web application - ansible.builtin.git: - repo: "{{ deploy_flask_app_git_repository }}" - dest: ~/webapp - - - name: Build webapp container image - ansible.builtin.command: - cmd: podman build -t webapp . 
- args: - chdir: ~/webapp - changed_when: false - - - name: Check running registry - ansible.builtin.shell: - cmd: > - podman container - ps -a - -f name=registry500x - --format=.Names - register: deploy_flask_app_container - become: true - changed_when: false - - - name: Create private registry - become: true - when: - - deploy_flask_app_container.stdout == "" - block: - - name: Create folders for the registry - ansible.builtin.file: - path: /opt/registry/{{ item }} - state: directory - mode: 0644 - with_items: - - auth - - certs - - data - - - name: Generate credentials for accessing the registry - ansible.builtin.shell: - cmd: > - htpasswd -bBc /opt/registry/auth/htpasswd - {{ deploy_flask_app_local_registry_user }} - {{ deploy_flask_app_local_registry_pwd }} - changed_when: false - - - name: Start the registry - ansible.builtin.shell: - cmd: > - podman run --name registry500x - -p {{ deploy_flask_app_listening_port }}:5000 - -v /opt/registry/data:/var/lib/registry:z - -v /opt/registry/auth:/auth:z - -e "REGISTRY_AUTH=htpasswd" - -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" - -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd - -e REGISTRY_COMPATIBILITY_SCHEMA1_ENABLED=true - -d - docker.io/library/registry:latest - changed_when: false - - - name: Push image into private registry - ansible.builtin.shell: - cmd: > - podman login 127.0.0.1:{{ deploy_flask_app_listening_port }} -u '{{ deploy_flask_app_local_registry_user }}' -p '{{ deploy_flask_app_local_registry_pwd }}' --tls-verify=false && - podman tag webapp 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp && - podman push 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp --tls-verify=false - changed_when: false - -- name: Initialize database tables - ansible.builtin.shell: - cmd: > - podman run --rm - -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" - -e FLASK_ENV="{{ deploy_flask_app_config.env }}" - -e DATABASE_HOST="{{ 
deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.host }}" - -e DATABASE_INSTANCE="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.name }}" - -e DATABASE_USER="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_username }}" - -e DATABASE_PASSWORD="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_user_password }}" - -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" - -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" - -e WORKER_HOSTNAME="{{ inventory_hostname }}" - -e WORKERS_HOSTS="bastion" - webapp flask {{ deploy_flask_app_force_init | bool | ternary('force-init-db', 'init-db') }} - run_once: true - changed_when: false - delegate_to: bastion - -- name: Copy playbook into bastion host - ansible.builtin.copy: - src: run_app.yaml - dest: ~/playbook.yaml + src: "{{ item.src }}" + dest: "{{ item.dest }}" mode: 0644 - delegate_to: bastion - -- name: Deploy application into workers - ansible.builtin.shell: - cmd: > - ansible-playbook playbook.yaml -i inventory.ini -vvv - -e '@vars.yaml' - -e registry_host_port='{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_ip }}:{{ deploy_flask_app_listening_port }}' - args: - chdir: ~/ - changed_when: false - delegate_to: bastion - -- name: Create load balancer - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key | default(omit) }}" - aws_secret_key: "{{ aws_secret_key | default(omit) }}" - security_token: "{{ security_token | default(omit) }}" - region: "{{ deploy_flask_app_region | default(aws_region) }}" - amazon.aws.elb_classic_lb: - state: present - name: "{{ deploy_flask_app_setup.add_host.host_vars.ansible_host_name }}-lb" - listeners: - - load_balancer_port: "{{ deploy_flask_app_listening_port }}" - instance_port: 5000 - protocol: HTTP - instance_protocol: HTTP - instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" - security_group_ids: - - "{{ 
deploy_flask_app_setup.add_host.host_vars.host_config.group_id }}" - subnets: - - "{{ deploy_flask_app_setup.add_host.host_vars.host_config.public_subnet_id }}" - scheme: internet-facing - wait: true - wait_timeout: 360 - retries: 5 - delay: 10 - until: deploy_flask_app_lb_result is successful - register: deploy_flask_app_lb_result - -- name: Debug application url - ansible.builtin.debug: - msg: "Application url: {{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + with_items: + - src: inventory.yml.j2 + dest: ~/inventory.yml + # - src: vars.yaml.j2 + # dest: vars.yaml + +- name: Ensure workers are reachable from bastion host + ansible.builtin.command: + cmd: "ansible -m ping -i ~/inventory.yml all" + +# - name: Create private registry and store webapp container image +# delegate_to: bastion +# block: +# - name: Clone git repository for web application +# ansible.builtin.git: +# repo: "{{ deploy_flask_app_git_repository }}" +# dest: ~/webapp + +# - name: Build webapp container image +# ansible.builtin.command: +# cmd: podman build -t webapp . 
+# args: +# chdir: ~/webapp +# changed_when: false + +# - name: Check running registry +# ansible.builtin.shell: +# cmd: > +# podman container +# ps -a +# -f name=registry500x +# --format=.Names +# register: deploy_flask_app_container +# become: true +# changed_when: false + +# - name: Create private registry +# become: true +# when: +# - deploy_flask_app_container.stdout == "" +# block: +# - name: Create folders for the registry +# ansible.builtin.file: +# path: /opt/registry/{{ item }} +# state: directory +# mode: 0644 +# with_items: +# - auth +# - certs +# - data + +# - name: Generate credentials for accessing the registry +# ansible.builtin.shell: +# cmd: > +# htpasswd -bBc /opt/registry/auth/htpasswd +# {{ deploy_flask_app_local_registry_user }} +# {{ deploy_flask_app_local_registry_pwd }} +# changed_when: false + +# - name: Start the registry +# ansible.builtin.shell: +# cmd: > +# podman run --name registry500x +# -p {{ deploy_flask_app_listening_port }}:5000 +# -v /opt/registry/data:/var/lib/registry:z +# -v /opt/registry/auth:/auth:z +# -e "REGISTRY_AUTH=htpasswd" +# -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" +# -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd +# -e REGISTRY_COMPATIBILITY_SCHEMA1_ENABLED=true +# -d +# docker.io/library/registry:latest +# changed_when: false + +# - name: Push image into private registry +# ansible.builtin.shell: +# cmd: > +# podman login 127.0.0.1:{{ deploy_flask_app_listening_port }} -u '{{ deploy_flask_app_local_registry_user }}' -p '{{ deploy_flask_app_local_registry_pwd }}' --tls-verify=false && +# podman tag webapp 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp && +# podman push 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp --tls-verify=false +# changed_when: false + +# - name: Initialize database tables +# ansible.builtin.shell: +# cmd: > +# podman run --rm +# -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" +# -e FLASK_ENV="{{ deploy_flask_app_config.env }}" +# -e 
DATABASE_HOST="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.host }}" +# -e DATABASE_INSTANCE="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.name }}" +# -e DATABASE_USER="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_username }}" +# -e DATABASE_PASSWORD="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_user_password }}" +# -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" +# -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" +# -e WORKER_HOSTNAME="{{ inventory_hostname }}" +# -e WORKERS_HOSTS="bastion" +# webapp flask {{ deploy_flask_app_force_init | bool | ternary('force-init-db', 'init-db') }} +# run_once: true +# changed_when: false +# delegate_to: bastion + +# - name: Copy playbook into bastion host +# ansible.builtin.copy: +# src: run_app.yaml +# dest: ~/playbook.yaml +# mode: 0644 +# delegate_to: bastion + +# - name: Deploy application into workers +# ansible.builtin.shell: +# cmd: > +# ansible-playbook playbook.yaml -i inventory.ini -vvv +# -e '@vars.yaml' +# -e registry_host_port='{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_ip }}:{{ deploy_flask_app_listening_port }}' +# args: +# chdir: ~/ +# changed_when: false +# delegate_to: bastion + +# - name: Debug application url +# ansible.builtin.debug: +# msg: "Application url: {{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 34df2040..ea223b7f 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -2,12 +2,28 @@ - name: Deploy flask app. 
module_defaults: group/aws: "{{ aws_setup_credentials__output }}" + vars: + deploy_flask_app__resource_prefix: "{{ deploy_flask_app_vm_info.instances.0.public_dns_name | split('.') | first }}" + deploy_flask_app__group_id: "{{ deploy_flask_app_vm_info.instances.0.security_groups[0].group_id }}" + deploy_flask_app__vm_image_id: "{{ deploy_flask_app_vm_info.instances.0.image_id }}" + deploy_flask_app__bastion_public_ip: "{{ deploy_flask_app_vm_info.instances.0.public_ip_address }}" + deploy_flask_app__bastion_private_ip: "{{ deploy_flask_app_vm_info.instances.0.private_ip_address }}" + deploy_flask_app__public_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" + deploy_flask_app__private_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" + # deploy_flask_app__rds_host: "{{ deploy_flask_app_rds_info.instances.0.endpoint.address }}" + # deploy_flask_app__rds_dbname: "{{ deploy_flask_app_rds_info.instances.0.db_name }}" block: - - name: Create new host in inventory for use in later plays. 
- ansible.builtin.include_tasks: setup.yaml + - name: Create infrastructure - workers and load balancer + ansible.builtin.include_tasks: setup_infra.yaml - - name: Deploy resource from Bastion - ansible.builtin.include_tasks: bastion_setup.yaml + - name: Add bastion host to inventory + ansible.builtin.include_tasks: update_inventory.yaml - - name: Deploy App - ansible.builtin.include_tasks: deploy_app.yaml + - name: Running from bastion host + delegate_to: bastion + block: + - name: Deploy resource from Bastion + ansible.builtin.include_tasks: setup_bastion.yaml + + - name: Deploy App + ansible.builtin.include_tasks: deploy_app.yaml diff --git a/roles/deploy_flask_app/tasks/setup.yaml b/roles/deploy_flask_app/tasks/setup.yaml deleted file mode 100644 index 7fc72188..00000000 --- a/roles/deploy_flask_app/tasks/setup.yaml +++ /dev/null @@ -1,28 +0,0 @@ ---- -- name: Set 'deploy_flask_app_region' variable - ansible.builtin.set_fact: - deploy_flask_app_region: "{{ deploy_flask_app_region | default(aws_region) }}" - -- name: Create resources playbook - block: - - name: Add host to inventory - ansible.builtin.add_host: - hostname: bastion - ansible_ssh_user: "{{ deploy_flask_app_bastion_host_username }}" - ansible_host: "{{ deploy_flask_app_vm_info.instances.0.public_ip_address }}" - ansible_ssh_common_args: -o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_bastion_ssh_private_key }} - ansible_python_interpreter: auto - ansible_host_name: "{{ deploy_flask_app_vm_info.instances.0.public_dns_name | split('.') | first }}" - host_config: - public_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" - private_subnet_id: "{{ deploy_flask_app_private_subnet_id }}" - image_id: "{{ deploy_flask_app_vm_info.instances.0.image_id }}" - group_id: "{{ deploy_flask_app_vm_info.instances.0.security_groups[0].group_id }}" - private_ip: "{{ deploy_flask_app_vm_info.instances.0.private_ip_address }}" - vpc_id: "{{ deploy_flask_app_vpc_id }}" 
- rds_info: - host: "{{ deploy_flask_app_rds_info.instances.0.endpoint.address }}" - name: "{{ deploy_flask_app_rds_info.instances.0.db_name }}" - master_user_password: "{{ deploy_flask_app_rds_master_password }}" - master_username: "{{ deploy_flask_app_rds_master_username }}" - register: deploy_flask_app_setup diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml new file mode 100644 index 00000000..a8fb99cc --- /dev/null +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -0,0 +1,86 @@ +--- +- name: Create Cloud Resources (workers, load balancer, etc) + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key | default(omit) }}" + aws_secret_key: "{{ aws_secret_key | default(omit) }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ deploy_flask_app_region | default(aws_region) }}" + + block: + - name: Set variables + ansible.builtin.set_fact: + deploy_flask_app_instance_name: "{{ deploy_flask_app__resource_prefix }}-workers" + + - name: List running instances + amazon.aws.ec2_instance_info: + filters: + tag:Name: "{{ deploy_flask_app_instance_name }}" + instance-state-name: running + register: deploy_flask_app_vms + + - name: Compute number of instances to create/delete + ansible.builtin.set_fact: + deploy_flask_app_expected_instances: "{{ deploy_flask_app_number_of_workers | int - deploy_flask_app_vms.instances | length }}" + + - name: Create list of targets hosts + amazon.aws.ec2_instance: + name: "{{ deploy_flask_app_instance_name }}" + instance_type: "{{ deploy_flask_app_workers_instance_type }}" + image_id: "{{ deploy_flask_app__vm_image_id }}" + key_name: "{{ deploy_flask_app_sshkey_pair_name }}" + subnet_id: "{{ deploy_flask_app_private_subnet_id }}" + network: + assign_public_ip: false + delete_on_termination: true + groups: + - "{{ deploy_flask_app__group_id }}" + security_groups: + - "{{ deploy_flask_app__group_id }}" + wait: true + count: "{{ 
deploy_flask_app_expected_instances }}" + state: started + register: deploy_flask_app_workers + when: deploy_flask_app_expected_instances | int > 0 + + - name: List running instances (once again) + amazon.aws.ec2_instance_info: + filters: + tag:Name: "{{ deploy_flask_app_instance_name }}" + instance-state-name: running + register: deploy_flask_app_vms + + - name: Create list of instances (join) + ansible.builtin.set_fact: + deploy_flask_app_instances_list: [] + - name: Update join_instances + ansible.builtin.set_fact: + deploy_flask_app_instances_list: "{{ deploy_flask_app_instances_list + [item.instance_id + ':' + item.private_ip_address] }}" + with_items: "{{ deploy_flask_app_vms.instances }}" + + - name: Set variables + ansible.builtin.set_fact: + deploy_flask_app_workers_instances: "{{ deploy_flask_app_vms.instances }}" + deploy_flask_app_workers_join: "{{ deploy_flask_app_instances_list | join(',') }}" + + - name: Create load balancer + amazon.aws.elb_classic_lb: + state: present + name: "{{ deploy_flask_app__resource_prefix }}-lb" + listeners: + - load_balancer_port: "{{ deploy_flask_app_listening_port }}" + instance_port: 5000 + protocol: HTTP + instance_protocol: HTTP + instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" + security_group_ids: + - "{{ deploy_flask_app__group_id }}" + subnets: + - "{{ deploy_flask_app__group_id }}" + scheme: internet-facing + wait: true + wait_timeout: 360 + retries: 5 + delay: 10 + until: deploy_flask_app_lb_result is successful + register: deploy_flask_app_lb_result diff --git a/roles/deploy_flask_app/templates/inventory.j2 b/roles/deploy_flask_app/templates/inventory.j2 deleted file mode 100644 index 80f633bb..00000000 --- a/roles/deploy_flask_app/templates/inventory.j2 +++ /dev/null @@ -1,4 +0,0 @@ -[all] -{% for item in deploy_flask_app_workers_instances %} -{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ 
deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_workers_ssh_private_key }}' ansible_host="{{ item.private_ip_address }}" -{% endfor %} diff --git a/roles/deploy_flask_app/templates/vars.yaml.j2 b/roles/deploy_flask_app/templates/vars.yaml.j2 index bc6a211d..86b3046f 100644 --- a/roles/deploy_flask_app/templates/vars.yaml.j2 +++ b/roles/deploy_flask_app/templates/vars.yaml.j2 @@ -7,9 +7,9 @@ rds_listening_port: "{{ rds_listening_port }}" application_dir: "{{ deploy_flask_app_config.app_dir }}" application_env: "{{ deploy_flask_app_config.env }}" application_db: - host: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.host }}" - instance: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.name }}" - dbuser_name: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_username }}" - dbuser_password: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_user_password }}" + host: "{{ deploy_flask_app__rds_host }}" + instance: "{{ deploy_flask_app__rds_dbname }}" + dbuser_name: "{{ deploy_flask_app_rds_master_username }}" + dbuser_password: "{{ deploy_flask_app_rds_master_password }}" admin_user: "{{ deploy_flask_app_config.admin_user }}" admin_password: "{{ deploy_flask_app_config.admin_password }}" diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index 66d29cec..a596690d 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,3 +1,3 @@ -cloud/aws +!cloud/aws role/deploy_flask_app time=35m \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml index 0e6574a5..de27871e 100644 --- 
a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml @@ -1 +1,3 @@ aws_security_token: '{{ security_token | default(omit) }}' +aws_region: eu-west-2 +resource_prefix: "ansible-test-91376696-fv-az000-111" \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index c1c6557f..2826932b 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -48,21 +48,21 @@ az: "{{ shared_az }}" register: private_subnet - - name: Create another private subnet for RDS - amazon.aws.ec2_vpc_subnet: - vpc_id: "{{ vpc.vpc.id }}" - cidr: "{{ subnet_cidr[2] }}" - az: "{{ region_av_zones[1] }}" - register: rds_subnet - - - name: Create subnet group for RDS instance - amazon.aws.rds_subnet_group: - name: "{{ rds_subnet_group_name }}" - description: subnet group for RDS instance to be hidden - subnets: - - "{{ rds_subnet.subnet.id }}" - - "{{ private_subnet.subnet.id }}" - state: present + # - name: Create another private subnet for RDS + # amazon.aws.ec2_vpc_subnet: + # vpc_id: "{{ vpc.vpc.id }}" + # cidr: "{{ subnet_cidr[2] }}" + # az: "{{ region_av_zones[1] }}" + # register: rds_subnet + + # - name: Create subnet group for RDS instance + # amazon.aws.rds_subnet_group: + # name: "{{ rds_subnet_group_name }}" + # description: subnet group for RDS instance to be hidden + # subnets: + # - "{{ rds_subnet.subnet.id }}" + # - "{{ private_subnet.subnet.id }}" + # state: present - name: Create internet gateway attached to the VPC amazon.aws.ec2_vpc_igw: @@ -127,39 +127,39 @@ state: present register: secgroup - - name: Create security group for RDS instance - amazon.aws.ec2_security_group: - name: "{{ rds_secgroup_name }}" - vpc_id: "{{ vpc.vpc.id }}" - description: Security group to allow RDS instance 
port - rules: - - cidr_ip: 0.0.0.0/0 - proto: tcp - from_port: "{{ rds_listening_port }}" - to_port: "{{ rds_listening_port }}" - tags: "{{ resource_tags }}" - state: present - register: rds_sg - - - name: Create RDS instance (PostGreSQL Database) - amazon.aws.rds_instance: - force_update_password: true - wait: true - allocated_storage: "{{ rds_allocated_storage_gb }}" - backup_retention_period: 0 - db_instance_class: "{{ rds_instance_class }}" - db_instance_identifier: "{{ rds_identifier }}" - db_name: "{{ rds_instance_name }}" - engine: "{{ rds_engine }}" - engine_version: "{{ rds_engine_version }}" - master_user_password: "{{ deploy_flask_app_rds_master_password }}" - master_username: "{{ deploy_flask_app_rds_master_username }}" - monitoring_interval: 0 - storage_type: standard - skip_final_snapshot: true - db_subnet_group_name: "{{ rds_subnet_group_name }}" - vpc_security_group_ids: - - "{{ rds_sg.group_id }}" + # - name: Create security group for RDS instance + # amazon.aws.ec2_security_group: + # name: "{{ rds_secgroup_name }}" + # vpc_id: "{{ vpc.vpc.id }}" + # description: Security group to allow RDS instance port + # rules: + # - cidr_ip: 0.0.0.0/0 + # proto: tcp + # from_port: "{{ rds_listening_port }}" + # to_port: "{{ rds_listening_port }}" + # tags: "{{ resource_tags }}" + # state: present + # register: rds_sg + + # - name: Create RDS instance (PostGreSQL Database) + # amazon.aws.rds_instance: + # force_update_password: true + # wait: true + # allocated_storage: "{{ rds_allocated_storage_gb }}" + # backup_retention_period: 0 + # db_instance_class: "{{ rds_instance_class }}" + # db_instance_identifier: "{{ rds_identifier }}" + # db_name: "{{ rds_instance_name }}" + # engine: "{{ rds_engine }}" + # engine_version: "{{ rds_engine_version }}" + # master_user_password: "{{ deploy_flask_app_rds_master_password }}" + # master_username: "{{ deploy_flask_app_rds_master_username }}" + # monitoring_interval: 0 + # storage_type: standard + # skip_final_snapshot: 
true + # db_subnet_group_name: "{{ rds_subnet_group_name }}" + # vpc_security_group_ids: + # - "{{ rds_sg.group_id }}" - name: Get RDS instance info amazon.aws.rds_instance_info: diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index cb841a8b..acbbe366 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -2,10 +2,8 @@ - name: "Run deploy_flask_app integration tests" module_defaults: group/aws: - aws_access_key: "{{ aws_access_key }}" - aws_secret_key: "{{ aws_secret_key }}" - security_token: "{{ aws_security_token }}" - region: "{{ aws_region }}" + aws_profile: eu_london + region: eu-west-2 block: - name: Run operation create @@ -21,13 +19,13 @@ deploy_flask_app_rds_info: "{{ rds_result }}" deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" - - name: Check that a page returns successfully - ansible.builtin.uri: - url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" - register: deploy_flask_app_check - until: "deploy_flask_app_check.status == 200" - retries: 5 - delay: 10 + # - name: Check that a page returns successfully + # ansible.builtin.uri: + # url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + # register: deploy_flask_app_check + # until: "deploy_flask_app_check.status == 200" + # retries: 5 + # delay: 10 always: # Cleanup after ourselves diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml index 488fd0cc..2edf02bd 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml @@ -30,12 +30,12 @@ image_filter: Fedora-Cloud-Base-37-* deploy_flask_app_bastion_host_name: "{{ 
resource_prefix }}-bastion" deploy_flask_app_bastion_host_username: fedora deploy_flask_app_bastion_host_required_packages: - - python3 - - python-virtualenv - - sshpass - - git - - podman - - httpd-tools + # - python3 + # - python-virtualenv + # - sshpass + # - git + # - podman + # - httpd-tools - ansible deploy_flask_app_sshkey_pair_name: "{{ resource_prefix }}-key" deploy_flask_app_workers_user_name: fedora From 4e5b860a3d13772b0ee2710585968df1b0687c4e Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 15:25:38 +0100 Subject: [PATCH 08/51] try to ping the workers --- .../deploy_flask_app/tasks/setup_bastion.yaml | 30 +++++++++++++++++++ .../tasks/update_inventory.yaml | 21 +++++++++++++ .../templates/inventory.yml.j2 | 8 +++++ 3 files changed, 59 insertions(+) create mode 100644 roles/deploy_flask_app/tasks/setup_bastion.yaml create mode 100644 roles/deploy_flask_app/tasks/update_inventory.yaml create mode 100644 roles/deploy_flask_app/templates/inventory.yml.j2 diff --git a/roles/deploy_flask_app/tasks/setup_bastion.yaml b/roles/deploy_flask_app/tasks/setup_bastion.yaml new file mode 100644 index 00000000..98789440 --- /dev/null +++ b/roles/deploy_flask_app/tasks/setup_bastion.yaml @@ -0,0 +1,30 @@ +--- +- name: Update ssh_config + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regex: "{{ item.regex }}" + line: "{{ item.line }}" + loop: + - regex: ^(# *)?ClientAliveInterval + line: ClientAliveInterval 1200 + - regex: ^(# *)?ClientAliveCountMax + line: ClientAliveCountMax 3 + become: true + +- name: Install required packages + ansible.builtin.yum: + name: "{{ deploy_flask_app_bastion_host_required_packages }}" + state: present + become: true + +- name: Generate ssh configuration for current user + ansible.builtin.user: + generate_ssh_key: true + state: present + name: "{{ deploy_flask_app_bastion_host_username }}" + +- name: Copy remote ssh private key file into bastion + ansible.builtin.copy: + src: "{{ 
deploy_flask_app_bastion_ssh_private_key }}" + dest: "{{ deploy_flask_app_workers_ssh_private_key }}" + mode: 0400 diff --git a/roles/deploy_flask_app/tasks/update_inventory.yaml b/roles/deploy_flask_app/tasks/update_inventory.yaml new file mode 100644 index 00000000..d6e24c18 --- /dev/null +++ b/roles/deploy_flask_app/tasks/update_inventory.yaml @@ -0,0 +1,21 @@ +--- +- name: Add bastion host into inventory + ansible.builtin.add_host: + hostname: bastion + ansible_ssh_user: "{{ deploy_flask_app_bastion_host_username }}" + ansible_host: "{{ deploy_flask_app__bastion_public_ip }}" + ansible_ssh_common_args: -o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_bastion_ssh_private_key }} + ansible_python_interpreter: auto + ansible_host_name: "{{ deploy_flask_app__resource_prefix }}" + host_config: + public_subnet_id: "{{ deploy_flask_app__public_subnet_id }}" + private_subnet_id: "{{ deploy_flask_app__private_subnet_id }}" + image_id: "{{ deploy_flask_app__vm_image_id }}" + group_id: "{{ deploy_flask_app__group_id }}" + private_ip: "{{ deploy_flask_app__bastion_private_ip }}" + vpc_id: "{{ deploy_flask_app_vpc_id }}" + rds_info: + # host: "{{ deploy_flask_app__rds_host }}" + # name: "{{ deploy_flask_app__rds_dbname }}" + master_user_password: "{{ deploy_flask_app_rds_master_password }}" + master_username: "{{ deploy_flask_app_rds_master_username }}" diff --git a/roles/deploy_flask_app/templates/inventory.yml.j2 b/roles/deploy_flask_app/templates/inventory.yml.j2 new file mode 100644 index 00000000..2311e07f --- /dev/null +++ b/roles/deploy_flask_app/templates/inventory.yml.j2 @@ -0,0 +1,8 @@ +all: + hosts: + {{ item.instance_id }}: + ansible_host: "{{ item.private_ip_address }}" + workers_hosts: "{{ deploy_flask_app_workers_join }}" + ansible_ssh_user: "{{ deploy_flask_app_workers_user_name }}" + ansible_ssh_common_args: '-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_workers_ssh_private_key }}' + 
ansible_python_interpreter: auto \ No newline at end of file From fff9401c3b4729640e516b427ad88a1d56c728d3 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 15:26:08 +0100 Subject: [PATCH 09/51] commit with aws alias --- tests/integration/targets/test_deploy_flask_app/aliases | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index a596690d..de2fdf2f 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,3 +1,3 @@ -!cloud/aws +cloud/aws role/deploy_flask_app -time=35m \ No newline at end of file +time=35m From f6d536fa396d54666f69f4647ee034d5a0810125 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 15:42:47 +0100 Subject: [PATCH 10/51] add credentials --- .../test_deploy_flask_app/defaults/main.yml | 4 +--- .../test_deploy_flask_app/tasks/main.yaml | 21 ++++++++++--------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml index de27871e..f5bf1f1b 100644 --- a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml @@ -1,3 +1 @@ -aws_security_token: '{{ security_token | default(omit) }}' -aws_region: eu-west-2 -resource_prefix: "ansible-test-91376696-fv-az000-111" \ No newline at end of file +aws_security_token: '{{ security_token | default(omit) }}' \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index acbbe366..6213de9b 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -2,8 +2,10 @@ - 
name: "Run deploy_flask_app integration tests" module_defaults: group/aws: - aws_profile: eu_london - region: eu-west-2 + aws_access_key: "{{ aws_access_key }}" + aws_secret_key: "{{ aws_secret_key }}" + security_token: "{{ aws_security_token }}" + region: "{{ aws_region }}" block: - name: Run operation create @@ -17,15 +19,14 @@ deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" deploy_flask_app_rds_info: "{{ rds_result }}" - deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" - # - name: Check that a page returns successfully - # ansible.builtin.uri: - # url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" - # register: deploy_flask_app_check - # until: "deploy_flask_app_check.status == 200" - # retries: 5 - # delay: 10 + - name: Check that a page returns successfully + ansible.builtin.uri: + url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + register: deploy_flask_app_check + until: "deploy_flask_app_check.status == 200" + retries: 5 + delay: 10 always: # Cleanup after ourselves From fed9b8e68af40aa20f64de70a0dc7e1d4bed05e9 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 15:58:31 +0100 Subject: [PATCH 11/51] add path to ssh private key --- .../targets/test_deploy_flask_app/tasks/main.yaml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index 6213de9b..9cb03f2b 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -19,14 +19,15 @@ deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" deploy_flask_app_rds_info: "{{ rds_result }}" + deploy_flask_app_bastion_ssh_private_key: "{{ 
test_deploy_flask_app__tmpdir.path }}/id_rsa" - - name: Check that a page returns successfully - ansible.builtin.uri: - url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" - register: deploy_flask_app_check - until: "deploy_flask_app_check.status == 200" - retries: 5 - delay: 10 + # - name: Check that a page returns successfully + # ansible.builtin.uri: + # url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + # register: deploy_flask_app_check + # until: "deploy_flask_app_check.status == 200" + # retries: 5 + # delay: 10 always: # Cleanup after ourselves From 0b40c4e79fe19e72d34b4be6cc95939ca4fc948d Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 16:11:49 +0100 Subject: [PATCH 12/51] skip load balancer for now --- roles/deploy_flask_app/tasks/setup_infra.yaml | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index a8fb99cc..5b552364 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -63,24 +63,24 @@ deploy_flask_app_workers_instances: "{{ deploy_flask_app_vms.instances }}" deploy_flask_app_workers_join: "{{ deploy_flask_app_instances_list | join(',') }}" - - name: Create load balancer - amazon.aws.elb_classic_lb: - state: present - name: "{{ deploy_flask_app__resource_prefix }}-lb" - listeners: - - load_balancer_port: "{{ deploy_flask_app_listening_port }}" - instance_port: 5000 - protocol: HTTP - instance_protocol: HTTP - instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" - security_group_ids: - - "{{ deploy_flask_app__group_id }}" - subnets: - - "{{ deploy_flask_app__group_id }}" - scheme: internet-facing - wait: true - wait_timeout: 360 - retries: 5 - delay: 10 - until: deploy_flask_app_lb_result is successful - register: 
deploy_flask_app_lb_result + # - name: Create load balancer + # amazon.aws.elb_classic_lb: + # state: present + # name: "{{ deploy_flask_app__resource_prefix }}-lb" + # listeners: + # - load_balancer_port: "{{ deploy_flask_app_listening_port }}" + # instance_port: 5000 + # protocol: HTTP + # instance_protocol: HTTP + # instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" + # security_group_ids: + # - "{{ deploy_flask_app__group_id }}" + # subnets: + # - "{{ deploy_flask_app__group_id }}" + # scheme: internet-facing + # wait: true + # wait_timeout: 360 + # retries: 5 + # delay: 10 + # until: deploy_flask_app_lb_result is successful + # register: deploy_flask_app_lb_result From eb7c6f3cb208ce2612bf965603d92bfd9778bcd1 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 16:25:25 +0100 Subject: [PATCH 13/51] update subnet value when creating load balancer --- roles/deploy_flask_app/tasks/setup_infra.yaml | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 5b552364..7b7ed634 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -63,24 +63,24 @@ deploy_flask_app_workers_instances: "{{ deploy_flask_app_vms.instances }}" deploy_flask_app_workers_join: "{{ deploy_flask_app_instances_list | join(',') }}" - # - name: Create load balancer - # amazon.aws.elb_classic_lb: - # state: present - # name: "{{ deploy_flask_app__resource_prefix }}-lb" - # listeners: - # - load_balancer_port: "{{ deploy_flask_app_listening_port }}" - # instance_port: 5000 - # protocol: HTTP - # instance_protocol: HTTP - # instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" - # security_group_ids: - # - "{{ deploy_flask_app__group_id }}" - # subnets: - # - "{{ deploy_flask_app__group_id }}" - # scheme: internet-facing - # wait: 
true - # wait_timeout: 360 - # retries: 5 - # delay: 10 - # until: deploy_flask_app_lb_result is successful - # register: deploy_flask_app_lb_result + - name: Create load balancer + amazon.aws.elb_classic_lb: + state: present + name: "{{ deploy_flask_app__resource_prefix }}-lb" + listeners: + - load_balancer_port: "{{ deploy_flask_app_listening_port }}" + instance_port: 5000 + protocol: HTTP + instance_protocol: HTTP + instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" + security_group_ids: + - "{{ deploy_flask_app__group_id }}" + subnets: + - "{{ deploy_flask_app__public_subnet_id }}" + scheme: internet-facing + wait: true + wait_timeout: 360 + retries: 5 + delay: 10 + until: deploy_flask_app_lb_result is successful + register: deploy_flask_app_lb_result From 0e09fe57f1b923ea0fd808ef13b0ab98955b522b Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 16:53:08 +0100 Subject: [PATCH 14/51] fix inventory template issue --- roles/deploy_flask_app/templates/inventory.yml.j2 | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/roles/deploy_flask_app/templates/inventory.yml.j2 b/roles/deploy_flask_app/templates/inventory.yml.j2 index 2311e07f..a4ec87ec 100644 --- a/roles/deploy_flask_app/templates/inventory.yml.j2 +++ b/roles/deploy_flask_app/templates/inventory.yml.j2 @@ -1,8 +1,10 @@ all: hosts: + {% for item in deploy_flask_app_workers_instances %} {{ item.instance_id }}: ansible_host: "{{ item.private_ip_address }}" workers_hosts: "{{ deploy_flask_app_workers_join }}" ansible_ssh_user: "{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args: '-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_workers_ssh_private_key }}' - ansible_python_interpreter: auto \ No newline at end of file + ansible_python_interpreter: auto + {% endfor %} \ No newline at end of file From 2552c00fb10266ba23c0952810b58f112e6a50eb Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 
Dec 2023 17:12:30 +0100 Subject: [PATCH 15/51] fix inventory --- roles/deploy_flask_app/templates/inventory.j2 | 4 ++++ roles/deploy_flask_app/templates/inventory.yml.j2 | 10 ---------- 2 files changed, 4 insertions(+), 10 deletions(-) create mode 100644 roles/deploy_flask_app/templates/inventory.j2 delete mode 100644 roles/deploy_flask_app/templates/inventory.yml.j2 diff --git a/roles/deploy_flask_app/templates/inventory.j2 b/roles/deploy_flask_app/templates/inventory.j2 new file mode 100644 index 00000000..d5fb7eba --- /dev/null +++ b/roles/deploy_flask_app/templates/inventory.j2 @@ -0,0 +1,4 @@ +[all] +{% for item in deploy_flask_app_workers_instances %} +{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no' ansible_host="{{ item.private_ip_address }}" +{% endfor %} diff --git a/roles/deploy_flask_app/templates/inventory.yml.j2 b/roles/deploy_flask_app/templates/inventory.yml.j2 deleted file mode 100644 index a4ec87ec..00000000 --- a/roles/deploy_flask_app/templates/inventory.yml.j2 +++ /dev/null @@ -1,10 +0,0 @@ -all: - hosts: - {% for item in deploy_flask_app_workers_instances %} - {{ item.instance_id }}: - ansible_host: "{{ item.private_ip_address }}" - workers_hosts: "{{ deploy_flask_app_workers_join }}" - ansible_ssh_user: "{{ deploy_flask_app_workers_user_name }}" - ansible_ssh_common_args: '-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_workers_ssh_private_key }}' - ansible_python_interpreter: auto - {% endfor %} \ No newline at end of file From 1228fac555a0db357acc1a3841eaaa85add01338 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 20 Dec 2023 17:12:50 +0100 Subject: [PATCH 16/51] fix inventory --- roles/deploy_flask_app/tasks/deploy_app.yaml | 6 +++--- roles/deploy_flask_app/templates/inventory.j2 | 2 +- 2 files changed, 4 insertions(+), 4 
deletions(-) diff --git a/roles/deploy_flask_app/tasks/deploy_app.yaml b/roles/deploy_flask_app/tasks/deploy_app.yaml index 53f16141..3b886026 100644 --- a/roles/deploy_flask_app/tasks/deploy_app.yaml +++ b/roles/deploy_flask_app/tasks/deploy_app.yaml @@ -5,14 +5,14 @@ dest: "{{ item.dest }}" mode: 0644 with_items: - - src: inventory.yml.j2 - dest: ~/inventory.yml + - src: inventory.j2 + dest: ~/inventory.ini # - src: vars.yaml.j2 # dest: vars.yaml - name: Ensure workers are reachable from bastion host ansible.builtin.command: - cmd: "ansible -m ping -i ~/inventory.yml all" + cmd: "ansible -m ping -i ~/inventory.ini all" # - name: Create private registry and store webapp container image # delegate_to: bastion diff --git a/roles/deploy_flask_app/templates/inventory.j2 b/roles/deploy_flask_app/templates/inventory.j2 index d5fb7eba..80f633bb 100644 --- a/roles/deploy_flask_app/templates/inventory.j2 +++ b/roles/deploy_flask_app/templates/inventory.j2 @@ -1,4 +1,4 @@ [all] {% for item in deploy_flask_app_workers_instances %} -{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no' ansible_host="{{ item.private_ip_address }}" +{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_workers_ssh_private_key }}' ansible_host="{{ item.private_ip_address }}" {% endfor %} From 3d8f72c5bfe1ed25a81c34fbaedc34a3901c1a2f Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 21 Dec 2023 07:43:30 +0100 Subject: [PATCH 17/51] full integration --- roles/deploy_flask_app/tasks/build_app.yaml | 69 ++++++++ roles/deploy_flask_app/tasks/deploy_app.yaml | 151 +++++------------- roles/deploy_flask_app/tasks/main.yaml | 7 +- 
.../tasks/update_inventory.yaml | 4 +- .../test_deploy_flask_app/defaults/main.yml | 2 +- .../test_deploy_flask_app/tasks/create.yaml | 96 +++++------ .../test_deploy_flask_app/tasks/main.yaml | 14 +- .../test_deploy_flask_app/vars/main.yaml | 12 +- 8 files changed, 176 insertions(+), 179 deletions(-) create mode 100644 roles/deploy_flask_app/tasks/build_app.yaml diff --git a/roles/deploy_flask_app/tasks/build_app.yaml b/roles/deploy_flask_app/tasks/build_app.yaml new file mode 100644 index 00000000..cd7b77ae --- /dev/null +++ b/roles/deploy_flask_app/tasks/build_app.yaml @@ -0,0 +1,69 @@ +--- +- name: Clone git repository for web application + ansible.builtin.git: + repo: "{{ deploy_flask_app_git_repository }}" + dest: ~/webapp + +- name: Build webapp container image + ansible.builtin.command: + cmd: podman build -t webapp . + args: + chdir: ~/webapp + changed_when: false + +- name: Check running registry + ansible.builtin.shell: + cmd: > + podman container + ps -a + -f name=registry500x + --format=.Names + register: deploy_flask_app_container + become: true + changed_when: false + +- name: Create private registry + become: true + when: + - deploy_flask_app_container.stdout == "" + block: + - name: Create folders for the registry + ansible.builtin.file: + path: /opt/registry/{{ item }} + state: directory + mode: 0644 + with_items: + - auth + - certs + - data + + - name: Generate credentials for accessing the registry + ansible.builtin.shell: + cmd: > + htpasswd -bBc /opt/registry/auth/htpasswd + {{ deploy_flask_app_local_registry_user }} + {{ deploy_flask_app_local_registry_pwd }} + changed_when: false + + - name: Start the registry + ansible.builtin.shell: + cmd: > + podman run --name registry500x + -p {{ deploy_flask_app_listening_port }}:5000 + -v /opt/registry/data:/var/lib/registry:z + -v /opt/registry/auth:/auth:z + -e "REGISTRY_AUTH=htpasswd" + -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" + -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd + -e 
REGISTRY_COMPATIBILITY_SCHEMA1_ENABLED=true + -d + docker.io/library/registry:latest + changed_when: false + +- name: Push image into private registry + ansible.builtin.shell: + cmd: > + podman login 127.0.0.1:{{ deploy_flask_app_listening_port }} -u '{{ deploy_flask_app_local_registry_user }}' -p '{{ deploy_flask_app_local_registry_pwd }}' --tls-verify=false && + podman tag webapp 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp && + podman push 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp --tls-verify=false + changed_when: false diff --git a/roles/deploy_flask_app/tasks/deploy_app.yaml b/roles/deploy_flask_app/tasks/deploy_app.yaml index 3b886026..058fccae 100644 --- a/roles/deploy_flask_app/tasks/deploy_app.yaml +++ b/roles/deploy_flask_app/tasks/deploy_app.yaml @@ -7,122 +7,47 @@ with_items: - src: inventory.j2 dest: ~/inventory.ini - # - src: vars.yaml.j2 - # dest: vars.yaml + - src: vars.yaml.j2 + dest: vars.yaml - name: Ensure workers are reachable from bastion host ansible.builtin.command: cmd: "ansible -m ping -i ~/inventory.ini all" -# - name: Create private registry and store webapp container image -# delegate_to: bastion -# block: -# - name: Clone git repository for web application -# ansible.builtin.git: -# repo: "{{ deploy_flask_app_git_repository }}" -# dest: ~/webapp - -# - name: Build webapp container image -# ansible.builtin.command: -# cmd: podman build -t webapp . 
-# args: -# chdir: ~/webapp -# changed_when: false - -# - name: Check running registry -# ansible.builtin.shell: -# cmd: > -# podman container -# ps -a -# -f name=registry500x -# --format=.Names -# register: deploy_flask_app_container -# become: true -# changed_when: false - -# - name: Create private registry -# become: true -# when: -# - deploy_flask_app_container.stdout == "" -# block: -# - name: Create folders for the registry -# ansible.builtin.file: -# path: /opt/registry/{{ item }} -# state: directory -# mode: 0644 -# with_items: -# - auth -# - certs -# - data - -# - name: Generate credentials for accessing the registry -# ansible.builtin.shell: -# cmd: > -# htpasswd -bBc /opt/registry/auth/htpasswd -# {{ deploy_flask_app_local_registry_user }} -# {{ deploy_flask_app_local_registry_pwd }} -# changed_when: false - -# - name: Start the registry -# ansible.builtin.shell: -# cmd: > -# podman run --name registry500x -# -p {{ deploy_flask_app_listening_port }}:5000 -# -v /opt/registry/data:/var/lib/registry:z -# -v /opt/registry/auth:/auth:z -# -e "REGISTRY_AUTH=htpasswd" -# -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" -# -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd -# -e REGISTRY_COMPATIBILITY_SCHEMA1_ENABLED=true -# -d -# docker.io/library/registry:latest -# changed_when: false - -# - name: Push image into private registry -# ansible.builtin.shell: -# cmd: > -# podman login 127.0.0.1:{{ deploy_flask_app_listening_port }} -u '{{ deploy_flask_app_local_registry_user }}' -p '{{ deploy_flask_app_local_registry_pwd }}' --tls-verify=false && -# podman tag webapp 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp && -# podman push 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp --tls-verify=false -# changed_when: false - -# - name: Initialize database tables -# ansible.builtin.shell: -# cmd: > -# podman run --rm -# -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" -# -e FLASK_ENV="{{ deploy_flask_app_config.env }}" -# -e 
DATABASE_HOST="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.host }}" -# -e DATABASE_INSTANCE="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.name }}" -# -e DATABASE_USER="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_username }}" -# -e DATABASE_PASSWORD="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_user_password }}" -# -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" -# -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" -# -e WORKER_HOSTNAME="{{ inventory_hostname }}" -# -e WORKERS_HOSTS="bastion" -# webapp flask {{ deploy_flask_app_force_init | bool | ternary('force-init-db', 'init-db') }} -# run_once: true -# changed_when: false -# delegate_to: bastion - -# - name: Copy playbook into bastion host -# ansible.builtin.copy: -# src: run_app.yaml -# dest: ~/playbook.yaml -# mode: 0644 -# delegate_to: bastion - -# - name: Deploy application into workers -# ansible.builtin.shell: -# cmd: > -# ansible-playbook playbook.yaml -i inventory.ini -vvv -# -e '@vars.yaml' -# -e registry_host_port='{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_ip }}:{{ deploy_flask_app_listening_port }}' -# args: -# chdir: ~/ -# changed_when: false -# delegate_to: bastion +- name: Initialize database tables + ansible.builtin.shell: + cmd: > + podman run --rm + -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" + -e FLASK_ENV="{{ deploy_flask_app_config.env }}" + -e DATABASE_HOST="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.host }}" + -e DATABASE_INSTANCE="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.name }}" + -e DATABASE_USER="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_username }}" + -e DATABASE_PASSWORD="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_user_password }}" + -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" + -e ADMIN_PASSWORD="{{ 
deploy_flask_app_config.admin_password }}" + -e WORKER_HOSTNAME="{{ inventory_hostname }}" + -e WORKERS_HOSTS="bastion" + webapp flask {{ deploy_flask_app_force_init | bool | ternary('force-init-db', 'init-db') }} + run_once: true + changed_when: false + +- name: Copy playbook into bastion host + ansible.builtin.copy: + src: run_app.yaml + dest: ~/playbook.yaml + mode: 0644 -# - name: Debug application url -# ansible.builtin.debug: -# msg: "Application url: {{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" +- name: Deploy application into workers + ansible.builtin.shell: + cmd: > + ansible-playbook playbook.yaml -i inventory.ini -vvv + -e '@vars.yaml' + -e registry_host_port='{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_ip }}:{{ deploy_flask_app_listening_port }}' + args: + chdir: ~/ + changed_when: false + +- name: Debug application url + ansible.builtin.debug: + msg: "Application url: {{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index ea223b7f..49cc02ff 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -10,8 +10,8 @@ deploy_flask_app__bastion_private_ip: "{{ deploy_flask_app_vm_info.instances.0.private_ip_address }}" deploy_flask_app__public_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" deploy_flask_app__private_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" - # deploy_flask_app__rds_host: "{{ deploy_flask_app_rds_info.instances.0.endpoint.address }}" - # deploy_flask_app__rds_dbname: "{{ deploy_flask_app_rds_info.instances.0.db_name }}" + deploy_flask_app__rds_host: "{{ deploy_flask_app_rds_info.instances.0.endpoint.address }}" + deploy_flask_app__rds_dbname: "{{ deploy_flask_app_rds_info.instances.0.db_name }}" block: - name: Create infrastructure - workers and load balancer 
ansible.builtin.include_tasks: setup_infra.yaml @@ -25,5 +25,8 @@ - name: Deploy resource from Bastion ansible.builtin.include_tasks: setup_bastion.yaml + - name: Build application and push container image into private registry + ansible.builtin.include_tasks: build_app.yaml + - name: Deploy App ansible.builtin.include_tasks: deploy_app.yaml diff --git a/roles/deploy_flask_app/tasks/update_inventory.yaml b/roles/deploy_flask_app/tasks/update_inventory.yaml index d6e24c18..cc8286df 100644 --- a/roles/deploy_flask_app/tasks/update_inventory.yaml +++ b/roles/deploy_flask_app/tasks/update_inventory.yaml @@ -15,7 +15,7 @@ private_ip: "{{ deploy_flask_app__bastion_private_ip }}" vpc_id: "{{ deploy_flask_app_vpc_id }}" rds_info: - # host: "{{ deploy_flask_app__rds_host }}" - # name: "{{ deploy_flask_app__rds_dbname }}" + host: "{{ deploy_flask_app__rds_host }}" + name: "{{ deploy_flask_app__rds_dbname }}" master_user_password: "{{ deploy_flask_app_rds_master_password }}" master_username: "{{ deploy_flask_app_rds_master_username }}" diff --git a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml index f5bf1f1b..0e6574a5 100644 --- a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml @@ -1 +1 @@ -aws_security_token: '{{ security_token | default(omit) }}' \ No newline at end of file +aws_security_token: '{{ security_token | default(omit) }}' diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index 2826932b..c1c6557f 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -48,21 +48,21 @@ az: "{{ shared_az }}" register: private_subnet - # - name: Create another private subnet for RDS - # amazon.aws.ec2_vpc_subnet: - # 
vpc_id: "{{ vpc.vpc.id }}" - # cidr: "{{ subnet_cidr[2] }}" - # az: "{{ region_av_zones[1] }}" - # register: rds_subnet - - # - name: Create subnet group for RDS instance - # amazon.aws.rds_subnet_group: - # name: "{{ rds_subnet_group_name }}" - # description: subnet group for RDS instance to be hidden - # subnets: - # - "{{ rds_subnet.subnet.id }}" - # - "{{ private_subnet.subnet.id }}" - # state: present + - name: Create another private subnet for RDS + amazon.aws.ec2_vpc_subnet: + vpc_id: "{{ vpc.vpc.id }}" + cidr: "{{ subnet_cidr[2] }}" + az: "{{ region_av_zones[1] }}" + register: rds_subnet + + - name: Create subnet group for RDS instance + amazon.aws.rds_subnet_group: + name: "{{ rds_subnet_group_name }}" + description: subnet group for RDS instance to be hidden + subnets: + - "{{ rds_subnet.subnet.id }}" + - "{{ private_subnet.subnet.id }}" + state: present - name: Create internet gateway attached to the VPC amazon.aws.ec2_vpc_igw: @@ -127,39 +127,39 @@ state: present register: secgroup - # - name: Create security group for RDS instance - # amazon.aws.ec2_security_group: - # name: "{{ rds_secgroup_name }}" - # vpc_id: "{{ vpc.vpc.id }}" - # description: Security group to allow RDS instance port - # rules: - # - cidr_ip: 0.0.0.0/0 - # proto: tcp - # from_port: "{{ rds_listening_port }}" - # to_port: "{{ rds_listening_port }}" - # tags: "{{ resource_tags }}" - # state: present - # register: rds_sg - - # - name: Create RDS instance (PostGreSQL Database) - # amazon.aws.rds_instance: - # force_update_password: true - # wait: true - # allocated_storage: "{{ rds_allocated_storage_gb }}" - # backup_retention_period: 0 - # db_instance_class: "{{ rds_instance_class }}" - # db_instance_identifier: "{{ rds_identifier }}" - # db_name: "{{ rds_instance_name }}" - # engine: "{{ rds_engine }}" - # engine_version: "{{ rds_engine_version }}" - # master_user_password: "{{ deploy_flask_app_rds_master_password }}" - # master_username: "{{ deploy_flask_app_rds_master_username }}" 
- # monitoring_interval: 0 - # storage_type: standard - # skip_final_snapshot: true - # db_subnet_group_name: "{{ rds_subnet_group_name }}" - # vpc_security_group_ids: - # - "{{ rds_sg.group_id }}" + - name: Create security group for RDS instance + amazon.aws.ec2_security_group: + name: "{{ rds_secgroup_name }}" + vpc_id: "{{ vpc.vpc.id }}" + description: Security group to allow RDS instance port + rules: + - cidr_ip: 0.0.0.0/0 + proto: tcp + from_port: "{{ rds_listening_port }}" + to_port: "{{ rds_listening_port }}" + tags: "{{ resource_tags }}" + state: present + register: rds_sg + + - name: Create RDS instance (PostGreSQL Database) + amazon.aws.rds_instance: + force_update_password: true + wait: true + allocated_storage: "{{ rds_allocated_storage_gb }}" + backup_retention_period: 0 + db_instance_class: "{{ rds_instance_class }}" + db_instance_identifier: "{{ rds_identifier }}" + db_name: "{{ rds_instance_name }}" + engine: "{{ rds_engine }}" + engine_version: "{{ rds_engine_version }}" + master_user_password: "{{ deploy_flask_app_rds_master_password }}" + master_username: "{{ deploy_flask_app_rds_master_username }}" + monitoring_interval: 0 + storage_type: standard + skip_final_snapshot: true + db_subnet_group_name: "{{ rds_subnet_group_name }}" + vpc_security_group_ids: + - "{{ rds_sg.group_id }}" - name: Get RDS instance info amazon.aws.rds_instance_info: diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index 9cb03f2b..cb841a8b 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -21,13 +21,13 @@ deploy_flask_app_rds_info: "{{ rds_result }}" deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" - # - name: Check that a page returns successfully - # ansible.builtin.uri: - # url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ 
deploy_flask_app_listening_port }}" - # register: deploy_flask_app_check - # until: "deploy_flask_app_check.status == 200" - # retries: 5 - # delay: 10 + - name: Check that a page returns successfully + ansible.builtin.uri: + url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + register: deploy_flask_app_check + until: "deploy_flask_app_check.status == 200" + retries: 5 + delay: 10 always: # Cleanup after ourselves diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml index 2edf02bd..488fd0cc 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml @@ -30,12 +30,12 @@ image_filter: Fedora-Cloud-Base-37-* deploy_flask_app_bastion_host_name: "{{ resource_prefix }}-bastion" deploy_flask_app_bastion_host_username: fedora deploy_flask_app_bastion_host_required_packages: - # - python3 - # - python-virtualenv - # - sshpass - # - git - # - podman - # - httpd-tools + - python3 + - python-virtualenv + - sshpass + - git + - podman + - httpd-tools - ansible deploy_flask_app_sshkey_pair_name: "{{ resource_prefix }}-key" deploy_flask_app_workers_user_name: fedora From 12a48c1700d619295924302ac695eb8d33e6d537 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 21 Dec 2023 13:49:48 +0100 Subject: [PATCH 18/51] install host packages via user_data when creating instances --- roles/deploy_flask_app/files/run_app.yaml | 7 ------- roles/deploy_flask_app/tasks/setup_bastion.yaml | 1 + roles/deploy_flask_app/tasks/setup_infra.yaml | 6 ++++++ .../targets/test_deploy_flask_app/tasks/create.yaml | 5 +++++ .../targets/test_deploy_flask_app/vars/main.yaml | 3 ++- 5 files changed, 14 insertions(+), 8 deletions(-) diff --git a/roles/deploy_flask_app/files/run_app.yaml b/roles/deploy_flask_app/files/run_app.yaml index b221927b..fc642d19 100644 --- 
a/roles/deploy_flask_app/files/run_app.yaml +++ b/roles/deploy_flask_app/files/run_app.yaml @@ -20,13 +20,6 @@ - regex: ^(# *)?ClientAliveCountMax line: ClientAliveCountMax 3 - - name: Install Podman - ansible.builtin.yum: - name: - - podman - update_cache: True - state: present - - name: Pull image from private registry ansible.builtin.shell: cmd: > diff --git a/roles/deploy_flask_app/tasks/setup_bastion.yaml b/roles/deploy_flask_app/tasks/setup_bastion.yaml index 98789440..eb5ec09e 100644 --- a/roles/deploy_flask_app/tasks/setup_bastion.yaml +++ b/roles/deploy_flask_app/tasks/setup_bastion.yaml @@ -16,6 +16,7 @@ name: "{{ deploy_flask_app_bastion_host_required_packages }}" state: present become: true + when: deploy_flask_app_bastion_host_required_packages | length > 0 - name: Generate ssh configuration for current user ansible.builtin.user: diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 7b7ed634..5e6c684b 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -39,6 +39,12 @@ - "{{ deploy_flask_app__group_id }}" wait: true count: "{{ deploy_flask_app_expected_instances }}" + user_data: | + #cloud-config + package_upgrade: true + package_update: true + packages: + - podman state: started register: deploy_flask_app_workers when: deploy_flask_app_expected_instances | int > 0 diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index c1c6557f..457d5389 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -198,6 +198,11 @@ - "{{ secgroup.group_id }}" security_groups: - "{{ secgroup.group_id }}" + user_data: | + #cloud-config + package_upgrade: true + package_update: true + packages: "{{ deploy_flask_app_bastion_cloud_config_packages }}" wait: true state: 
started register: vm_result diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml index 488fd0cc..c6f467df 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml @@ -29,7 +29,8 @@ image_filter: Fedora-Cloud-Base-37-* deploy_flask_app_bastion_host_name: "{{ resource_prefix }}-bastion" deploy_flask_app_bastion_host_username: fedora -deploy_flask_app_bastion_host_required_packages: +deploy_flask_app_bastion_host_required_packages: [] +deploy_flask_app_bastion_cloud_config_packages: - python3 - python-virtualenv - sshpass From 7a16851ffe86f4f22780f6af816db471dc4c472b Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 21 Dec 2023 16:35:01 +0100 Subject: [PATCH 19/51] add IAM role to install package into bastion --- roles/deploy_flask_app/files/run_app.yaml | 7 +++++++ roles/deploy_flask_app/tasks/setup_infra.yaml | 6 ------ .../files/ec2-trust-policy.json | 13 +++++++++++++ .../test_deploy_flask_app/tasks/create.yaml | 17 +++++++++++++---- .../test_deploy_flask_app/tasks/delete.yaml | 7 +++++++ .../test_deploy_flask_app/vars/main.yaml | 1 + 6 files changed, 41 insertions(+), 10 deletions(-) create mode 100644 tests/integration/targets/test_deploy_flask_app/files/ec2-trust-policy.json diff --git a/roles/deploy_flask_app/files/run_app.yaml b/roles/deploy_flask_app/files/run_app.yaml index fc642d19..b221927b 100644 --- a/roles/deploy_flask_app/files/run_app.yaml +++ b/roles/deploy_flask_app/files/run_app.yaml @@ -20,6 +20,13 @@ - regex: ^(# *)?ClientAliveCountMax line: ClientAliveCountMax 3 + - name: Install Podman + ansible.builtin.yum: + name: + - podman + update_cache: True + state: present + - name: Pull image from private registry ansible.builtin.shell: cmd: > diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 5e6c684b..7b7ed634 
100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -39,12 +39,6 @@ - "{{ deploy_flask_app__group_id }}" wait: true count: "{{ deploy_flask_app_expected_instances }}" - user_data: | - #cloud-config - package_upgrade: true - package_update: true - packages: - - podman state: started register: deploy_flask_app_workers when: deploy_flask_app_expected_instances | int > 0 diff --git a/tests/integration/targets/test_deploy_flask_app/files/ec2-trust-policy.json b/tests/integration/targets/test_deploy_flask_app/files/ec2-trust-policy.json new file mode 100644 index 00000000..63d22eae --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/files/ec2-trust-policy.json @@ -0,0 +1,13 @@ +{ + "Version": "2008-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Action": "sts:AssumeRole" + } + ] + } diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index 457d5389..dfba3004 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -185,6 +185,15 @@ mode: 0400 when: rsa_key is changed + - name: Ensure IAM instance role exists + amazon.aws.iam_role: + name: "{{ bastion_host_iam_role }}" + assume_role_policy_document: "{{ lookup('file', 'ec2-trust-policy.json') }}" + state: present + create_instance_profile: true + wait: true + register: role_output + - name: Create a virtual machine amazon.aws.ec2_instance: name: "{{ deploy_flask_app_bastion_host_name }}" @@ -192,6 +201,8 @@ image_id: "{{ images.images.0.image_id }}" key_name: "{{ deploy_flask_app_sshkey_pair_name }}" subnet_id: "{{ subnet.subnet.id }}" + ebs_optimized: true + instance_role: "{{ role_output.iam_role.role_name }}" network: assign_public_ip: true groups: @@ -199,10 +210,8 @@ 
security_groups: - "{{ secgroup.group_id }}" user_data: | - #cloud-config - package_upgrade: true - package_update: true - packages: "{{ deploy_flask_app_bastion_cloud_config_packages }}" + #!/bin/bash + yum install -y python3 python-virtualenv sshpass git podman httpd-tools ansible wait: true state: started register: vm_result diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml index c0233c7b..c4f27422 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml @@ -130,3 +130,10 @@ cidr_block: "{{ vpc_cidr }}" state: absent ignore_errors: true + + # Delete IAM Role + - name: Delete IAM role + amazon.aws.iam_role: + name: "{{ bastion_host_iam_role }}" + state: absent + wait: true diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml index c6f467df..8f654ac0 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml @@ -23,6 +23,7 @@ rds_engine_version: "14.8" bastion_host_type: t3.micro bastion_host_venv_path: ~/env image_filter: Fedora-Cloud-Base-37-* +bastion_host_iam_role: "{{ resource_prefix }}-role" # vars for the deploy_flask_app role and create task # ================================================= From 584a227b536af9da0e5682c8b4cf4b148a27251f Mon Sep 17 00:00:00 2001 From: abikouo Date: Fri, 22 Dec 2023 15:01:18 +0100 Subject: [PATCH 20/51] minor updates --- ...deploy_flask_app-update-arguments-spec.yml | 5 + roles/deploy_flask_app/defaults/main.yml | 3 +- roles/deploy_flask_app/files/run_app.yaml | 68 ------------- .../deploy_flask_app/meta/argument_specs.yml | 21 ---- roles/deploy_flask_app/tasks/build_app.yaml | 69 ------------- roles/deploy_flask_app/tasks/deploy_app.yaml | 99 ++++++++++--------- 
roles/deploy_flask_app/tasks/main.yaml | 16 +-- .../deploy_flask_app/tasks/setup_bastion.yaml | 31 ------ roles/deploy_flask_app/tasks/setup_infra.yaml | 9 +- .../tasks/update_inventory.yaml | 49 ++++++--- roles/deploy_flask_app/templates/inventory.j2 | 4 - .../deploy_flask_app/templates/ssh_config.j2 | 15 +++ roles/deploy_flask_app/templates/vars.yaml.j2 | 15 --- .../test_deploy_flask_app/vars/main.yaml | 13 --- 14 files changed, 112 insertions(+), 305 deletions(-) delete mode 100644 roles/deploy_flask_app/files/run_app.yaml delete mode 100644 roles/deploy_flask_app/tasks/build_app.yaml delete mode 100644 roles/deploy_flask_app/tasks/setup_bastion.yaml delete mode 100644 roles/deploy_flask_app/templates/inventory.j2 create mode 100644 roles/deploy_flask_app/templates/ssh_config.j2 delete mode 100644 roles/deploy_flask_app/templates/vars.yaml.j2 diff --git a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml index f968915e..a8f8cb62 100644 --- a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml +++ b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml @@ -3,3 +3,8 @@ breaking_changes: - >- roles/deploy_flask_app - Add parameter ``deploy_flask_app_bastion_ssh_private_key`` to define the path to the ssh private key file to use to connect to the bastion host (https://github.com/redhat-cop/cloud.aws_ops/issues/103). + - >- + roles/deploy_flask_app - The following parameters no longer required have been removed + ``deploy_flask_app_bastion_host_required_packages``, ``deploy_flask_app_local_registry_port``, + ``deploy_flask_app_local_registry_pwd``, ``deploy_flask_app_local_registry_user``, + ``deploy_flask_app_git_repository`` (https://github.com/redhat-cop/cloud.aws_ops/issues/103). 
diff --git a/roles/deploy_flask_app/defaults/main.yml b/roles/deploy_flask_app/defaults/main.yml index f57b9c25..5fa51023 100644 --- a/roles/deploy_flask_app/defaults/main.yml +++ b/roles/deploy_flask_app/defaults/main.yml @@ -1,2 +1,3 @@ --- -deploy_flask_app_workers_ssh_private_key: "/tmp/id_rsa" +deploy_flask_app_workers_ssh_private_key: /tmp/id_rsa +deploy_flask_app_container_image: docker.io/aubinredhat/webapp:1.0.0 diff --git a/roles/deploy_flask_app/files/run_app.yaml b/roles/deploy_flask_app/files/run_app.yaml deleted file mode 100644 index b221927b..00000000 --- a/roles/deploy_flask_app/files/run_app.yaml +++ /dev/null @@ -1,68 +0,0 @@ ---- -- name: Run app - hosts: all - gather_facts: false - strategy: free - become: true - - vars: - container_name: webapp-container01 - - tasks: - - name: Update ssh_config - ansible.builtin.lineinfile: - path: /etc/ssh/sshd_config - regex: "{{ item.regex }}" - line: "{{ item.line }}" - loop: - - regex: ^(# *)?ClientAliveInterval - line: ClientAliveInterval 1200 - - regex: ^(# *)?ClientAliveCountMax - line: ClientAliveCountMax 3 - - - name: Install Podman - ansible.builtin.yum: - name: - - podman - update_cache: True - state: present - - - name: Pull image from private registry - ansible.builtin.shell: - cmd: > - podman login {{ registry_host_port }} - -u {{ registry_login.user }} - -p {{ registry_login.password }} - --tls-verify=false && - podman pull {{ registry_host_port }}/ansible-webapp - --tls-verify=false - changed_when: false - - - name: Check running container - ansible.builtin.shell: - cmd: > - podman container ps -a - -f name={{ container_name }} - --format=.Names - register: container - changed_when: false - - - name: Run application instance - ansible.builtin.shell: - cmd: > - podman run --rm - -e FLASK_APP="{{ application_dir }}" - -e FLASK_ENV="{{ application_env }}" - -e DATABASE_HOST="{{ application_db.host }}" - -e DATABASE_INSTANCE="{{ application_db.instance }}" - -e DATABASE_USER="{{ 
application_db.dbuser_name }}" - -e DATABASE_PASSWORD="{{ application_db.dbuser_password }}" - -e ADMIN_USER="{{ application_db.admin_user }}" - -e ADMIN_PASSWORD="{{ application_db.admin_password }}" - -e WORKER_HOSTNAME="{{ inventory_hostname }}" - -e WORKERS_HOSTS="{{ workers_hosts }}" - -p 5000:5000 --name {{ container_name }} - -d {{ registry_host_port }}/ansible-webapp - when: - - container.stdout == "" - changed_when: true diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index 008f629e..b7d5dc9d 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -21,11 +21,6 @@ argument_specs: type: path required: True version_added: 2.1.0 - deploy_flask_app_bastion_host_required_packages: - description: Packages to be installed on the bastion host. - type: list - elements: str - required: True deploy_flask_app_private_subnet_id: description: Private subnet id of the bastion host. type: str @@ -65,10 +60,6 @@ argument_specs: description: Username for the workers. type: str required: True - deploy_flask_app_git_repository: - description: Git repository to be cloned for the webapp. - type: str - required: True deploy_flask_app_listening_port: description: Load balancer port. type: int @@ -77,18 +68,6 @@ argument_specs: description: A boolean value True to force init the app and False to not force init. type: bool required: True - deploy_flask_app_local_registry_user: - description: Registry user name. - type: str - required: True - deploy_flask_app_local_registry_pwd: - description: Registry password. - type: str - required: True - deploy_flask_app_local_registry_port: - description: Registry port. - type: int - required: True deploy_flask_app_config: description: A dict of config parameterys for the app. 
type: dict diff --git a/roles/deploy_flask_app/tasks/build_app.yaml b/roles/deploy_flask_app/tasks/build_app.yaml deleted file mode 100644 index cd7b77ae..00000000 --- a/roles/deploy_flask_app/tasks/build_app.yaml +++ /dev/null @@ -1,69 +0,0 @@ ---- -- name: Clone git repository for web application - ansible.builtin.git: - repo: "{{ deploy_flask_app_git_repository }}" - dest: ~/webapp - -- name: Build webapp container image - ansible.builtin.command: - cmd: podman build -t webapp . - args: - chdir: ~/webapp - changed_when: false - -- name: Check running registry - ansible.builtin.shell: - cmd: > - podman container - ps -a - -f name=registry500x - --format=.Names - register: deploy_flask_app_container - become: true - changed_when: false - -- name: Create private registry - become: true - when: - - deploy_flask_app_container.stdout == "" - block: - - name: Create folders for the registry - ansible.builtin.file: - path: /opt/registry/{{ item }} - state: directory - mode: 0644 - with_items: - - auth - - certs - - data - - - name: Generate credentials for accessing the registry - ansible.builtin.shell: - cmd: > - htpasswd -bBc /opt/registry/auth/htpasswd - {{ deploy_flask_app_local_registry_user }} - {{ deploy_flask_app_local_registry_pwd }} - changed_when: false - - - name: Start the registry - ansible.builtin.shell: - cmd: > - podman run --name registry500x - -p {{ deploy_flask_app_listening_port }}:5000 - -v /opt/registry/data:/var/lib/registry:z - -v /opt/registry/auth:/auth:z - -e "REGISTRY_AUTH=htpasswd" - -e "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm" - -e REGISTRY_AUTH_HTPASSWD_PATH=/auth/htpasswd - -e REGISTRY_COMPATIBILITY_SCHEMA1_ENABLED=true - -d - docker.io/library/registry:latest - changed_when: false - -- name: Push image into private registry - ansible.builtin.shell: - cmd: > - podman login 127.0.0.1:{{ deploy_flask_app_listening_port }} -u '{{ deploy_flask_app_local_registry_user }}' -p '{{ deploy_flask_app_local_registry_pwd }}' --tls-verify=false 
&& - podman tag webapp 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp && - podman push 127.0.0.1:{{ deploy_flask_app_listening_port }}/ansible-webapp --tls-verify=false - changed_when: false diff --git a/roles/deploy_flask_app/tasks/deploy_app.yaml b/roles/deploy_flask_app/tasks/deploy_app.yaml index 058fccae..406d32f0 100644 --- a/roles/deploy_flask_app/tasks/deploy_app.yaml +++ b/roles/deploy_flask_app/tasks/deploy_app.yaml @@ -1,53 +1,54 @@ --- -- name: Generate configuration (inventory, vars) from templates - ansible.builtin.template: - src: "{{ item.src }}" - dest: "{{ item.dest }}" - mode: 0644 - with_items: - - src: inventory.j2 - dest: ~/inventory.ini - - src: vars.yaml.j2 - dest: vars.yaml +- name: Deploy application into worker + delegate_to: "{{ worker_id }}" + vars: + deploy_flask_app__worker_container_name: "webapp-container1" + block: + - name: Update ssh_config + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regex: "{{ item.regex }}" + line: "{{ item.line }}" + loop: + - regex: ^(# *)?ClientAliveInterval + line: ClientAliveInterval 1200 + - regex: ^(# *)?ClientAliveCountMax + line: ClientAliveCountMax 3 + become: true -- name: Ensure workers are reachable from bastion host - ansible.builtin.command: - cmd: "ansible -m ping -i ~/inventory.ini all" + - name: Install Podman + ansible.builtin.yum: + name: + - podman + update_cache: False + state: present + become: true -- name: Initialize database tables - ansible.builtin.shell: - cmd: > - podman run --rm - -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" - -e FLASK_ENV="{{ deploy_flask_app_config.env }}" - -e DATABASE_HOST="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.host }}" - -e DATABASE_INSTANCE="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.name }}" - -e DATABASE_USER="{{ deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_username }}" - -e DATABASE_PASSWORD="{{ 
deploy_flask_app_setup.add_host.host_vars.host_config.rds_info.master_user_password }}" - -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" - -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" - -e WORKER_HOSTNAME="{{ inventory_hostname }}" - -e WORKERS_HOSTS="bastion" - webapp flask {{ deploy_flask_app_force_init | bool | ternary('force-init-db', 'init-db') }} - run_once: true - changed_when: false + - name: Check running container + ansible.builtin.shell: + cmd: > + podman container ps -a + -f name={{ deploy_flask_app__worker_container_name }} + --format=.Names + register: container + changed_when: false -- name: Copy playbook into bastion host - ansible.builtin.copy: - src: run_app.yaml - dest: ~/playbook.yaml - mode: 0644 - -- name: Deploy application into workers - ansible.builtin.shell: - cmd: > - ansible-playbook playbook.yaml -i inventory.ini -vvv - -e '@vars.yaml' - -e registry_host_port='{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_ip }}:{{ deploy_flask_app_listening_port }}' - args: - chdir: ~/ - changed_when: false - -- name: Debug application url - ansible.builtin.debug: - msg: "Application url: {{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + - name: Run application instance + ansible.builtin.shell: + cmd: >- + podman run --rm + -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" + -e FLASK_ENV="{{ deploy_flask_app_config.env }}" + -e DATABASE_HOST="{{ deploy_flask_app__rds_host }}" + -e DATABASE_INSTANCE="{{ deploy_flask_app__rds_dbname }}" + -e DATABASE_USER="{{ deploy_flask_app_rds_master_username }}" + -e DATABASE_PASSWORD="{{deploy_flask_app_rds_master_password }}" + -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" + -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" + -e WORKER_HOSTNAME="{{ worker_id }}" + -e WORKERS_HOSTS="{{ deploy_flask_app_instances | join(',') }}" + -p 5000:5000 --name {{ deploy_flask_app__worker_container_name }} + -d {{ 
deploy_flask_app_container_image }} + when: + - container.stdout == "" + changed_when: true diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 49cc02ff..328d2a8d 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -19,14 +19,8 @@ - name: Add bastion host to inventory ansible.builtin.include_tasks: update_inventory.yaml - - name: Running from bastion host - delegate_to: bastion - block: - - name: Deploy resource from Bastion - ansible.builtin.include_tasks: setup_bastion.yaml - - - name: Build application and push container image into private registry - ansible.builtin.include_tasks: build_app.yaml - - - name: Deploy App - ansible.builtin.include_tasks: deploy_app.yaml + - name: Deploy application into workers + ansible.builtin.include_tasks: deploy_app.yaml + with_items: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" + loop_control: + loop_var: worker_id diff --git a/roles/deploy_flask_app/tasks/setup_bastion.yaml b/roles/deploy_flask_app/tasks/setup_bastion.yaml deleted file mode 100644 index eb5ec09e..00000000 --- a/roles/deploy_flask_app/tasks/setup_bastion.yaml +++ /dev/null @@ -1,31 +0,0 @@ ---- -- name: Update ssh_config - ansible.builtin.lineinfile: - path: /etc/ssh/sshd_config - regex: "{{ item.regex }}" - line: "{{ item.line }}" - loop: - - regex: ^(# *)?ClientAliveInterval - line: ClientAliveInterval 1200 - - regex: ^(# *)?ClientAliveCountMax - line: ClientAliveCountMax 3 - become: true - -- name: Install required packages - ansible.builtin.yum: - name: "{{ deploy_flask_app_bastion_host_required_packages }}" - state: present - become: true - when: deploy_flask_app_bastion_host_required_packages | length > 0 - -- name: Generate ssh configuration for current user - ansible.builtin.user: - generate_ssh_key: true - state: present - name: "{{ deploy_flask_app_bastion_host_username }}" - -- name: Copy remote ssh private key file into 
bastion - ansible.builtin.copy: - src: "{{ deploy_flask_app_bastion_ssh_private_key }}" - dest: "{{ deploy_flask_app_workers_ssh_private_key }}" - mode: 0400 diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 7b7ed634..e366d812 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -52,17 +52,12 @@ - name: Create list of instances (join) ansible.builtin.set_fact: - deploy_flask_app_instances_list: [] + deploy_flask_app_instances: [] - name: Update join_instances ansible.builtin.set_fact: - deploy_flask_app_instances_list: "{{ deploy_flask_app_instances_list + [item.instance_id + ':' + item.private_ip_address] }}" + deploy_flask_app_instances: "{{ deploy_flask_app_instances + [item.instance_id + ':' + item.private_ip_address] }}" with_items: "{{ deploy_flask_app_vms.instances }}" - - name: Set variables - ansible.builtin.set_fact: - deploy_flask_app_workers_instances: "{{ deploy_flask_app_vms.instances }}" - deploy_flask_app_workers_join: "{{ deploy_flask_app_instances_list | join(',') }}" - - name: Create load balancer amazon.aws.elb_classic_lb: state: present diff --git a/roles/deploy_flask_app/tasks/update_inventory.yaml b/roles/deploy_flask_app/tasks/update_inventory.yaml index cc8286df..5db69292 100644 --- a/roles/deploy_flask_app/tasks/update_inventory.yaml +++ b/roles/deploy_flask_app/tasks/update_inventory.yaml @@ -1,21 +1,38 @@ --- +# Configure local ssh config +- name: Create ssh configuration files + ansible.builtin.file: + state: "{{ item.state }}" + path: "{{ item.path }}" + with_items: + - state: directory + path: "~/.ssh" + - state: touch + path: "~/.ssh/config" + +- name: Update local .ssh/config + ansible.builtin.blockinfile: + state: present + insertafter: EOF + dest: "~/.ssh/config" + content: "{{ lookup('template', 'ssh_config.j2') }}" + - name: Add bastion host into inventory ansible.builtin.add_host: hostname: bastion - 
ansible_ssh_user: "{{ deploy_flask_app_bastion_host_username }}" - ansible_host: "{{ deploy_flask_app__bastion_public_ip }}" - ansible_ssh_common_args: -o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_bastion_ssh_private_key }} ansible_python_interpreter: auto - ansible_host_name: "{{ deploy_flask_app__resource_prefix }}" - host_config: - public_subnet_id: "{{ deploy_flask_app__public_subnet_id }}" - private_subnet_id: "{{ deploy_flask_app__private_subnet_id }}" - image_id: "{{ deploy_flask_app__vm_image_id }}" - group_id: "{{ deploy_flask_app__group_id }}" - private_ip: "{{ deploy_flask_app__bastion_private_ip }}" - vpc_id: "{{ deploy_flask_app_vpc_id }}" - rds_info: - host: "{{ deploy_flask_app__rds_host }}" - name: "{{ deploy_flask_app__rds_dbname }}" - master_user_password: "{{ deploy_flask_app_rds_master_password }}" - master_username: "{{ deploy_flask_app_rds_master_username }}" + ansible_host_name: bastion + +- name: Copy remote ssh private key file into bastion + ansible.builtin.copy: + src: "{{ deploy_flask_app_bastion_ssh_private_key }}" + dest: "{{ deploy_flask_app_workers_ssh_private_key }}" + mode: 0400 + delegate_to: bastion + +- name: Add workers into inventory + ansible.builtin.add_host: + hostname: "{{ item }}" + ansible_python_interpreter: auto + ansible_host_name: "{{ item }}" + with_items: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" diff --git a/roles/deploy_flask_app/templates/inventory.j2 b/roles/deploy_flask_app/templates/inventory.j2 deleted file mode 100644 index 80f633bb..00000000 --- a/roles/deploy_flask_app/templates/inventory.j2 +++ /dev/null @@ -1,4 +0,0 @@ -[all] -{% for item in deploy_flask_app_workers_instances %} -{{ item.instance_id }} workers_hosts="{{ deploy_flask_app_workers_join }}" ansible_ssh_user="{{ deploy_flask_app_workers_user_name }}" ansible_ssh_common_args='-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ 
deploy_flask_app_workers_ssh_private_key }}' ansible_host="{{ item.private_ip_address }}" -{% endfor %} diff --git a/roles/deploy_flask_app/templates/ssh_config.j2 b/roles/deploy_flask_app/templates/ssh_config.j2 new file mode 100644 index 00000000..28e8a98f --- /dev/null +++ b/roles/deploy_flask_app/templates/ssh_config.j2 @@ -0,0 +1,15 @@ +Host bastion + HostName {{ deploy_flask_app__bastion_public_ip }} + User {{ deploy_flask_app_bastion_host_username }} + IdentityFile {{ deploy_flask_app_bastion_ssh_private_key }} + StrictHostKeyChecking no + UserKnownHostsFile /dev/null + +{% for item in deploy_flask_app_vms.instances %} +Host {{ item.instance_id }} + User {{ deploy_flask_app_workers_user_name }} + HostName {{ item.private_ip_address }} + StrictHostKeyChecking no + UserKnownHostsFile /dev/null + ProxyCommand ssh -T -q -o 'ForwardAgent yes' bastion 'ssh-add {{ deploy_flask_app_workers_ssh_private_key }} && nc %h %p' +{% endfor %} \ No newline at end of file diff --git a/roles/deploy_flask_app/templates/vars.yaml.j2 b/roles/deploy_flask_app/templates/vars.yaml.j2 deleted file mode 100644 index 86b3046f..00000000 --- a/roles/deploy_flask_app/templates/vars.yaml.j2 +++ /dev/null @@ -1,15 +0,0 @@ ---- -registry_host_port: "{{ deploy_flask_app_setup.add_host.host_vars.host_config.private_ip }}:{{ deploy_flask_app_local_registry_port }}" -registry_login: - user: "{{ deploy_flask_app_local_registry_user }}" - password: "{{ deploy_flask_app_local_registry_pwd }}" -rds_listening_port: "{{ rds_listening_port }}" -application_dir: "{{ deploy_flask_app_config.app_dir }}" -application_env: "{{ deploy_flask_app_config.env }}" -application_db: - host: "{{ deploy_flask_app__rds_host }}" - instance: "{{ deploy_flask_app__rds_dbname }}" - dbuser_name: "{{ deploy_flask_app_rds_master_username }}" - dbuser_password: "{{ deploy_flask_app_rds_master_password }}" - admin_user: "{{ deploy_flask_app_config.admin_user }}" - admin_password: "{{ deploy_flask_app_config.admin_password }}" 
diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml index 8f654ac0..72c0b8d2 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml @@ -30,31 +30,18 @@ bastion_host_iam_role: "{{ resource_prefix }}-role" deploy_flask_app_bastion_host_name: "{{ resource_prefix }}-bastion" deploy_flask_app_bastion_host_username: fedora -deploy_flask_app_bastion_host_required_packages: [] -deploy_flask_app_bastion_cloud_config_packages: - - python3 - - python-virtualenv - - sshpass - - git - - podman - - httpd-tools - - ansible deploy_flask_app_sshkey_pair_name: "{{ resource_prefix }}-key" deploy_flask_app_workers_user_name: fedora deploy_flask_app_workers_instance_type: t3.micro deploy_flask_app_number_of_workers: 2 deploy_flask_app_listening_port: 5000 -deploy_flask_app_git_repository: https://github.com/abikouo/webapp_pyflask_demo.git deploy_flask_app_config: env: development admin_user: admin admin_password: admin app_dir: /app/pyapp deploy_flask_app_force_init: false -deploy_flask_app_local_registry_user: ansible -deploy_flask_app_local_registry_pwd: testing123 -deploy_flask_app_local_registry_port: "{{ deploy_flask_app_listening_port }}" deploy_flask_app_rds_master_password: L#5cH2mgy_ deploy_flask_app_rds_master_username: ansible From 8af0f2c7f6ba434423274ac4439a2bb41567bfd0 Mon Sep 17 00:00:00 2001 From: abikouo Date: Fri, 22 Dec 2023 15:15:44 +0100 Subject: [PATCH 21/51] update playbooks --- playbooks/webapp/README.md | 14 ---- playbooks/webapp/files/ec2-trust-policy.json | 13 ++++ playbooks/webapp/files/run_app.yaml | 70 ------------------- playbooks/webapp/tasks/create.yaml | 14 ++++ playbooks/webapp/vars/main.yaml | 14 +--- roles/deploy_flask_app/README.md | 5 -- .../targets/test_deploy_flask_app/aliases | 2 +- .../test_deploy_flask_app/defaults/main.yml | 2 + 
.../test_deploy_flask_app/handlers/main.yml | 6 +- .../test_deploy_flask_app/tasks/create.yaml | 19 ++--- .../test_deploy_flask_app/tasks/main.yaml | 21 +++--- 11 files changed, 52 insertions(+), 128 deletions(-) create mode 100644 playbooks/webapp/files/ec2-trust-policy.json delete mode 100644 playbooks/webapp/files/run_app.yaml diff --git a/playbooks/webapp/README.md b/playbooks/webapp/README.md index bf7bdef3..ffd54b22 100644 --- a/playbooks/webapp/README.md +++ b/playbooks/webapp/README.md @@ -101,16 +101,6 @@ To delete the webapp: * **deploy_flask_app_bastion_host_name** (str): Name for the EC2 instance. Default: `"{{ resource_prefix }}-bastion"` * **bastion_host_type** (str): Instance type for the EC2 instance. Default: `t2.xlarge` * **deploy_flask_app_bastion_host_username** (str): Username for the bastion host SSH user. Default: `fedora` -* **deploy_flask_app_bastion_host_required_packages** (list, elements str): Packages to be installed on the bastion host. Default: - ```yaml - - python3 - - python-virtualenv - - sshpass - - git - - podman - - httpd-tools - - ansible - ``` ### Networking @@ -142,12 +132,8 @@ To delete the webapp: ### Webapp -* **deploy_flask_app_git_repository** (str): Git repository for the webapp. Default: `https://github.com/abikouo/webapp_pyflask_demo.git` * **deploy_flask_app_number_of_workers** (int): Number of worker instances to create. Default: `2` * **deploy_flask_app_workers_instance_type** (str): EC2 instance type for workers. Default: `t2.xlarge` -* **deploy_flask_app_local_registry_user** (str): Username for local Podman registry. Default: `ansible` -* **deploy_flask_app_local_registry_pwd** (str): Password for local Podman registry. Default: `testing123` -* **deploy_flask_app_local_registry_port** (int): Port for the local Podman registery. 
Default: `"{{ app_listening_port }}"` * **deploy_flask_app_config** (dict, elements dict): Configuration values for the webapp, passed as corresponding env variables FLASK_APP, FLASK_ENV, ADMIN_USER, and ADMIN_PASSWORD when the app is deployed. Default: ```yaml app_dir: /app/pyapp diff --git a/playbooks/webapp/files/ec2-trust-policy.json b/playbooks/webapp/files/ec2-trust-policy.json new file mode 100644 index 00000000..63d22eae --- /dev/null +++ b/playbooks/webapp/files/ec2-trust-policy.json @@ -0,0 +1,13 @@ +{ + "Version": "2008-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Action": "sts:AssumeRole" + } + ] + } diff --git a/playbooks/webapp/files/run_app.yaml b/playbooks/webapp/files/run_app.yaml deleted file mode 100644 index 62033421..00000000 --- a/playbooks/webapp/files/run_app.yaml +++ /dev/null @@ -1,70 +0,0 @@ ---- -- name: Run app - hosts: all - gather_facts: false - strategy: free - become: true - - vars: - container_name: webapp-container01 - - tasks: - - name: Update ssh_config - ansible.builtin.lineinfile: - path: /etc/ssh/sshd_config - regex: "{{ item.regex }}" - line: "{{ item.line }}" - loop: - - regex: ^(# *)?ClientAliveInterval - line: ClientAliveInterval 1200 - - regex: ^(# *)?ClientAliveCountMax - line: ClientAliveCountMax 3 - - - name: Install Podman - ansible.builtin.yum: - name: - - podman - sslverify: false - validate_certs: false - update_cache: true - state: present - - - name: Pull image from private registry - ansible.builtin.shell: - cmd: > - podman login {{ registry_host_port }} - -u {{ registry_login.user }} - -p {{ registry_login.password }} - --tls-verify=false && - podman pull {{ registry_host_port }}/ansible-webapp - --tls-verify=false - changed_when: false - - - name: Check running container - ansible.builtin.shell: - cmd: > - podman container ps -a - -f name={{ container_name }} - --format=.Names - register: container - changed_when: false - - - name: 
Run application instance - ansible.builtin.shell: - cmd: > - podman run --rm - -e FLASK_APP="{{ application_dir }}" - -e FLASK_ENV="{{ application_env }}" - -e DATABASE_HOST="{{ application_db.host }}" - -e DATABASE_INSTANCE="{{ application_db.instance }}" - -e DATABASE_USER="{{ application_db.dbuser_name }}" - -e DATABASE_PASSWORD="{{ application_db.dbuser_password }}" - -e ADMIN_USER="{{ application_db.admin_user }}" - -e ADMIN_PASSWORD="{{ application_db.admin_password }}" - -e WORKER_HOSTNAME="{{ inventory_hostname }}" - -e WORKERS_HOSTS="{{ workers_hosts }}" - -p 5000:5000 --name {{ container_name }} - -d {{ registry_host_port }}/ansible-webapp - when: - - container.stdout == "" - changed_when: true diff --git a/playbooks/webapp/tasks/create.yaml b/playbooks/webapp/tasks/create.yaml index c885e592..ee870b65 100644 --- a/playbooks/webapp/tasks/create.yaml +++ b/playbooks/webapp/tasks/create.yaml @@ -227,6 +227,15 @@ instance-state-name: running register: vm_result + - name: Ensure IAM instance role exists + amazon.aws.iam_role: + name: "{{ ec2_iam_role_name }}" + assume_role_policy_document: "{{ lookup('file', 'ec2-trust-policy.json') }}" + state: present + create_instance_profile: true + wait: true + register: role_output + - name: Create a virtual machine when: vm_result.instances | length == 0 amazon.aws.ec2_instance: @@ -235,12 +244,17 @@ image_id: "{{ images.images.0.image_id }}" key_name: "{{ deploy_flask_app_sshkey_pair_name }}" subnet_id: "{{ subnet.subnet.id }}" + ebs_optimized: true + instance_role: "{{ role_output.iam_role.role_name }}" network: assign_public_ip: true groups: - "{{ secgroup.group_id }}" security_groups: - "{{ secgroup.group_id }}" + user_data: | + #!/bin/bash + yum install -y python3 python-virtualenv sshpass netcat wait: true state: started register: vm_result diff --git a/playbooks/webapp/vars/main.yaml b/playbooks/webapp/vars/main.yaml index b04589a8..a0de9c64 100644 --- a/playbooks/webapp/vars/main.yaml +++ 
b/playbooks/webapp/vars/main.yaml @@ -14,6 +14,7 @@ operation: create image_filter: Fedora-Cloud-Base-35-*gp2-0 public_secgroup_name: "{{ resource_prefix }}-sg" +ec2_iam_role_name: "{{ resource_prefix }}-role" rds_subnet_group_name: "{{ resource_prefix }}-rds-sg" rds_secgroup_name: "{{ resource_prefix }}-rds-sec" rds_identifier: "{{ resource_prefix }}-rds-01" @@ -30,29 +31,16 @@ rds_listening_port: 5432 deploy_flask_app_sshkey_pair_name: "{{ resource_prefix }}-key" deploy_flask_app_bastion_host_name: "{{ resource_prefix }}-bastion" deploy_flask_app_bastion_host_username: fedora -deploy_flask_app_bastion_host_required_packages: - - python3 - - python-virtualenv - - sshpass - - git - - gcc - - podman - - httpd-tools - - ansible-core deploy_flask_app_workers_instance_type: t2.xlarge deploy_flask_app_workers_user_name: fedora deploy_flask_app_number_of_workers: 2 deploy_flask_app_listening_port: 5000 -deploy_flask_app_git_repository: https://github.com/abikouo/webapp_pyflask_demo.git deploy_flask_app_config: env: development admin_user: admin admin_password: admin app_dir: /app/pyapp deploy_flask_app_force_init: false -deploy_flask_app_local_registry_user: ansible -deploy_flask_app_local_registry_pwd: testing123 -deploy_flask_app_local_registry_port: "{{ deploy_flask_app_listening_port }}" deploy_flask_app_rds_master_password: L#5cH2mgy_ deploy_flask_app_rds_master_username: ansible diff --git a/roles/deploy_flask_app/README.md b/roles/deploy_flask_app/README.md index 7105c128..6b3ca73a 100644 --- a/roles/deploy_flask_app/README.md +++ b/roles/deploy_flask_app/README.md @@ -36,7 +36,6 @@ Role Variables # Bastion host * **deploy_flask_app_bastion_host_name** (str): Name for the EC2 instance. * **deploy_flask_app_bastion_host_username** (str): Username for the bastion host SSH user. -* **deploy_flask_app_bastion_host_required_packages** (list): Packages to be installed on the bastion host. * **deploy_flask_app_sshkey_pair_name** (str): Name for the EC2 key pair. 
* **deploy_flask_app_bastion_ssh_private_key** (path): The path to the ssh private key file to use to connect to the bastion host. * **deploy_flask_app_number_of_workers** (int): Number of instances to create. @@ -44,12 +43,8 @@ Role Variables * **deploy_flask_app_workers_user_name** (str): Username for the workers. # App -* **deploy_flask_app_git_repository** (str): Git repository to be cloned for the webapp. * **deploy_flask_app_listening_port** (int): Load balancer port. * **deploy_flask_app_force_init** (bool): A boolean value True to force init the app and False to not force init. -* **deploy_flask_app_local_registry_port** (int): Port for the local Podman registry. -* **deploy_flask_app_local_registry_user** (str): Registry user name. -* **deploy_flask_app_local_registry_pwd** (str): Registry password. * **deploy_flask_app_config** (dict): A dict of config parameterys for the app. **env** (str): Flask env. **admin_user** (str): App config's admin username. diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index de2fdf2f..02000abc 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,3 +1,3 @@ -cloud/aws +!cloud/aws role/deploy_flask_app time=35m diff --git a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml index 0e6574a5..67ac7d17 100644 --- a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml @@ -1 +1,3 @@ aws_security_token: '{{ security_token | default(omit) }}' +aws_region: eu-west-2 +resource_prefix: "asnbible-test-user-data-20231221" \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/handlers/main.yml b/tests/integration/targets/test_deploy_flask_app/handlers/main.yml index 
2e32ab7d..bbbbd13d 100644 --- a/tests/integration/targets/test_deploy_flask_app/handlers/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/handlers/main.yml @@ -1,6 +1,6 @@ --- -- name: Delete temporary key pair directory +- name: Delete temporary key pair file ansible.builtin.file: state: absent - path: "{{ test_deploy_flask_app__tmpdir.path }}" - when: test_deploy_flask_app__tmpdir is defined + path: "{{ test_deploy_flask_app__sshkey.path }}" + when: test_deploy_flask_app__sshkey is defined diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index dfba3004..9ff20a6a 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -166,25 +166,18 @@ db_instance_identifier: "{{ rds_identifier }}" register: rds_result - - name: Create temporary directory to save private key in + - name: Create temporary file for ssh private key ansible.builtin.tempfile: - suffix: .key - state: directory - register: test_deploy_flask_app__tmpdir - notify: 'Delete temporary key pair directory' + suffix: .id_rsa + register: test_deploy_flask_app__sshkey + # notify: 'Delete temporary key pair file' - name: Create key pair to connect to the VM amazon.aws.ec2_key: name: "{{ deploy_flask_app_sshkey_pair_name }}" + file_name: "{{ test_deploy_flask_app__sshkey.path }}" register: rsa_key - - name: Save private key into file - ansible.builtin.copy: - content: "{{ rsa_key.key.private_key }}" - dest: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" - mode: 0400 - when: rsa_key is changed - - name: Ensure IAM instance role exists amazon.aws.iam_role: name: "{{ bastion_host_iam_role }}" @@ -211,7 +204,7 @@ - "{{ secgroup.group_id }}" user_data: | #!/bin/bash - yum install -y python3 python-virtualenv sshpass git podman httpd-tools ansible + yum install -y python3 python-virtualenv sshpass netcat wait: 
true state: started register: vm_result diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index cb841a8b..06be0a38 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -2,10 +2,12 @@ - name: "Run deploy_flask_app integration tests" module_defaults: group/aws: - aws_access_key: "{{ aws_access_key }}" - aws_secret_key: "{{ aws_secret_key }}" - security_token: "{{ aws_security_token }}" - region: "{{ aws_region }}" + # aws_access_key: "{{ aws_access_key }}" + # aws_secret_key: "{{ aws_secret_key }}" + # security_token: "{{ aws_security_token }}" + # region: "{{ aws_region }}" + aws_profile: eu_london + region: eu-west-2 block: - name: Run operation create @@ -19,7 +21,8 @@ deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" deploy_flask_app_rds_info: "{{ rds_result }}" - deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__tmpdir.path }}/id_rsa" + # deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__sshkey.path }}" + deploy_flask_app_bastion_ssh_private_key: /tmp/ansible.db0gt4_6.id_rsa - name: Check that a page returns successfully ansible.builtin.uri: @@ -29,7 +32,7 @@ retries: 5 delay: 10 - always: - # Cleanup after ourselves - - name: Cleanup - ansible.builtin.include_tasks: "delete.yaml" + # always: + # # Cleanup after ourselves + # - name: Cleanup + # ansible.builtin.include_tasks: "delete.yaml" From 94c14573163e3beb77ca969f2464c1c0a621f0b7 Mon Sep 17 00:00:00 2001 From: abikouo Date: Mon, 8 Jan 2024 15:59:51 +0100 Subject: [PATCH 22/51] minor updates --- roles/deploy_flask_app/defaults/main.yml | 2 + roles/deploy_flask_app/meta/main.yaml | 6 +- roles/deploy_flask_app/tasks/deploy_app.yaml | 54 ------------ roles/deploy_flask_app/tasks/main.yaml | 10 +-- roles/deploy_flask_app/tasks/setup_infra.yaml | 15 
++-- .../tasks/start_containers.yaml | 83 +++++++++++++++++++ .../tasks/update_inventory.yaml | 38 --------- .../templates/bastion_ssh_config.j2 | 8 ++ .../templates/deploy_app.yaml.j2 | 49 +++++++++++ .../templates/local_ssh_config.j2 | 6 ++ .../deploy_flask_app/templates/ssh_config.j2 | 15 ---- .../templates/workers_inventory.yaml.j2 | 6 ++ .../targets/setup_rsa_keys/defaults/main.yml | 2 + .../targets/setup_rsa_keys/tasks/main.yml | 35 +++++--- .../targets/test_deploy_flask_app/aliases | 2 +- .../test_deploy_flask_app/defaults/main.yml | 2 - .../test_deploy_flask_app/handlers/main.yml | 6 -- .../test_deploy_flask_app/meta/main.yml | 3 + .../test_deploy_flask_app/tasks/create.yaml | 11 +-- .../test_deploy_flask_app/tasks/main.yaml | 21 ++--- .../test_deploy_flask_app/vars/main.yaml | 2 +- 21 files changed, 205 insertions(+), 171 deletions(-) delete mode 100644 roles/deploy_flask_app/tasks/deploy_app.yaml create mode 100644 roles/deploy_flask_app/tasks/start_containers.yaml delete mode 100644 roles/deploy_flask_app/tasks/update_inventory.yaml create mode 100644 roles/deploy_flask_app/templates/bastion_ssh_config.j2 create mode 100644 roles/deploy_flask_app/templates/deploy_app.yaml.j2 create mode 100644 roles/deploy_flask_app/templates/local_ssh_config.j2 delete mode 100644 roles/deploy_flask_app/templates/ssh_config.j2 create mode 100644 roles/deploy_flask_app/templates/workers_inventory.yaml.j2 create mode 100644 tests/integration/targets/setup_rsa_keys/defaults/main.yml delete mode 100644 tests/integration/targets/test_deploy_flask_app/handlers/main.yml create mode 100644 tests/integration/targets/test_deploy_flask_app/meta/main.yml diff --git a/roles/deploy_flask_app/defaults/main.yml b/roles/deploy_flask_app/defaults/main.yml index 5fa51023..b93c0b82 100644 --- a/roles/deploy_flask_app/defaults/main.yml +++ b/roles/deploy_flask_app/defaults/main.yml @@ -1,3 +1,5 @@ --- deploy_flask_app_workers_ssh_private_key: /tmp/id_rsa 
+deploy_flask_app_workers_inventory_file: /tmp/workers_inventory.yaml +deploy_flask_app_workers_playbook_file: /tmp/deploy_app.yaml deploy_flask_app_container_image: docker.io/aubinredhat/webapp:1.0.0 diff --git a/roles/deploy_flask_app/meta/main.yaml b/roles/deploy_flask_app/meta/main.yaml index 3bf1568b..975d38ef 100644 --- a/roles/deploy_flask_app/meta/main.yaml +++ b/roles/deploy_flask_app/meta/main.yaml @@ -1,4 +1,4 @@ --- -dependencies: - - role: cloud.aws_ops.aws_setup_credentials -allow_duplicates: true +# dependencies: +# - role: cloud.aws_ops.aws_setup_credentials +# allow_duplicates: true diff --git a/roles/deploy_flask_app/tasks/deploy_app.yaml b/roles/deploy_flask_app/tasks/deploy_app.yaml deleted file mode 100644 index 406d32f0..00000000 --- a/roles/deploy_flask_app/tasks/deploy_app.yaml +++ /dev/null @@ -1,54 +0,0 @@ ---- -- name: Deploy application into worker - delegate_to: "{{ worker_id }}" - vars: - deploy_flask_app__worker_container_name: "webapp-container1" - block: - - name: Update ssh_config - ansible.builtin.lineinfile: - path: /etc/ssh/sshd_config - regex: "{{ item.regex }}" - line: "{{ item.line }}" - loop: - - regex: ^(# *)?ClientAliveInterval - line: ClientAliveInterval 1200 - - regex: ^(# *)?ClientAliveCountMax - line: ClientAliveCountMax 3 - become: true - - - name: Install Podman - ansible.builtin.yum: - name: - - podman - update_cache: False - state: present - become: true - - - name: Check running container - ansible.builtin.shell: - cmd: > - podman container ps -a - -f name={{ deploy_flask_app__worker_container_name }} - --format=.Names - register: container - changed_when: false - - - name: Run application instance - ansible.builtin.shell: - cmd: >- - podman run --rm - -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" - -e FLASK_ENV="{{ deploy_flask_app_config.env }}" - -e DATABASE_HOST="{{ deploy_flask_app__rds_host }}" - -e DATABASE_INSTANCE="{{ deploy_flask_app__rds_dbname }}" - -e DATABASE_USER="{{ 
deploy_flask_app_rds_master_username }}" - -e DATABASE_PASSWORD="{{deploy_flask_app_rds_master_password }}" - -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" - -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" - -e WORKER_HOSTNAME="{{ worker_id }}" - -e WORKERS_HOSTS="{{ deploy_flask_app_instances | join(',') }}" - -p 5000:5000 --name {{ deploy_flask_app__worker_container_name }} - -d {{ deploy_flask_app_container_image }} - when: - - container.stdout == "" - changed_when: true diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 328d2a8d..93a2073a 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -16,11 +16,5 @@ - name: Create infrastructure - workers and load balancer ansible.builtin.include_tasks: setup_infra.yaml - - name: Add bastion host to inventory - ansible.builtin.include_tasks: update_inventory.yaml - - - name: Deploy application into workers - ansible.builtin.include_tasks: deploy_app.yaml - with_items: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" - loop_control: - loop_var: worker_id + - name: Start application container into workers + ansible.builtin.include_tasks: start_containers.yaml diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index e366d812..388f2640 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -1,12 +1,5 @@ --- - name: Create Cloud Resources (workers, load balancer, etc) - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key | default(omit) }}" - aws_secret_key: "{{ aws_secret_key | default(omit) }}" - security_token: "{{ security_token | default(omit) }}" - region: "{{ deploy_flask_app_region | default(aws_region) }}" - block: - name: Set variables ansible.builtin.set_fact: @@ -15,7 +8,9 @@ - name: List running instances amazon.aws.ec2_instance_info: 
filters: - tag:Name: "{{ deploy_flask_app_instance_name }}" + network-interface.subnet-id: "{{ deploy_flask_app_private_subnet_id }}" + key-name: "{{ deploy_flask_app_sshkey_pair_name }}" + image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running register: deploy_flask_app_vms @@ -46,7 +41,9 @@ - name: List running instances (once again) amazon.aws.ec2_instance_info: filters: - tag:Name: "{{ deploy_flask_app_instance_name }}" + network-interface.subnet-id: "{{ deploy_flask_app_private_subnet_id }}" + key-name: "{{ deploy_flask_app_sshkey_pair_name }}" + image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running register: deploy_flask_app_vms diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml b/roles/deploy_flask_app/tasks/start_containers.yaml new file mode 100644 index 00000000..359885fc --- /dev/null +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -0,0 +1,83 @@ +--- +# Configure local ssh config +- name: Create ssh configuration files + ansible.builtin.file: + state: "{{ item.state }}" + path: "{{ item.path }}" + mode: '0755' + with_items: + - state: directory + path: "~/.ssh" + - state: touch + path: "~/.ssh/config" + +- name: Update local .ssh/config + ansible.builtin.blockinfile: + state: present + insertafter: EOF + dest: "~/.ssh/config" + content: "{{ lookup('template', 'local_ssh_config.j2') }}" + +- name: Add bastion host into inventory + ansible.builtin.add_host: + hostname: bastion + ansible_python_interpreter: auto + ansible_host_name: bastion + +- name: Update local .ssh/config + ansible.builtin.blockinfile: + state: present + insertafter: EOF + dest: "~/.ssh/config" + content: "{{ lookup('template', 'local_ssh_config.j2') }}" + +- name: Configure bastion + delegate_to: bastion + block: + - name: Create ssh configuration files + ansible.builtin.file: + state: "{{ item.state }}" + path: "{{ item.path }}" + mode: '0755' + with_items: + - state: directory + path: "~/.ssh" + - state: touch + 
path: "~/.ssh/config" + + - name: Update local .ssh/config + ansible.builtin.blockinfile: + state: present + insertafter: EOF + dest: "~/.ssh/config" + content: "{{ lookup('template', 'bastion_ssh_config.j2') }}" + + - name: Copy remote ssh private key file into bastion + ansible.builtin.copy: + src: "{{ deploy_flask_app_bastion_ssh_private_key }}" + dest: "{{ deploy_flask_app_workers_ssh_private_key }}" + mode: 0400 + + - name: Generate workers inventory file + ansible.builtin.copy: + content: "{{ lookup('template', 'workers_inventory.yaml.j2') }}" + dest: "{{ deploy_flask_app_workers_inventory_file }}" + mode: 0755 + + - name: Generate playbook to deploy application + ansible.builtin.copy: + content: "{{ lookup('template', 'deploy_app.yaml.j2') }}" + dest: "{{ deploy_flask_app_workers_playbook_file }}" + mode: 0755 + vars: + deploy_flask_app_instances_list: "{{ deploy_flask_app_instances | join(',') }}" + deploy_flask_app_worker_hostname: "{{ '{{' }} inventory_hostname {{ '}}' }}" + + - name: Deploy application into workers + ansible.builtin.shell: + cmd: >- + ansible-playbook + -i {{ deploy_flask_app_workers_inventory_file }} + {{ deploy_flask_app_workers_playbook_file }} + -v + changed_when: false diff --git a/roles/deploy_flask_app/tasks/update_inventory.yaml b/roles/deploy_flask_app/tasks/update_inventory.yaml deleted file mode 100644 index 5db69292..00000000 --- a/roles/deploy_flask_app/tasks/update_inventory.yaml +++ /dev/null @@ -1,38 +0,0 @@ ---- -# Configure local ssh config -- name: Create ssh configuration files - ansible.builtin.file: - state: "{{ item.state }}" - path: "{{ item.path }}" - with_items: - - state: directory - path: "~/.ssh" - - state: touch - path: "~/.ssh/config" - -- name: Update local .ssh/config - ansible.builtin.blockinfile: - state: present - insertafter: EOF - dest: "~/.ssh/config" - content: "{{ lookup('template', 'ssh_config.j2') }}" - -- name: Add bastion host into inventory - ansible.builtin.add_host: - hostname: bastion - 
ansible_python_interpreter: auto - ansible_host_name: bastion - -- name: Copy remote ssh private key file into bastion - ansible.builtin.copy: - src: "{{ deploy_flask_app_bastion_ssh_private_key }}" - dest: "{{ deploy_flask_app_workers_ssh_private_key }}" - mode: 0400 - delegate_to: bastion - -- name: Add workers into inventory - ansible.builtin.add_host: - hostname: "{{ item }}" - ansible_python_interpreter: auto - ansible_host_name: "{{ item }}" - with_items: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" diff --git a/roles/deploy_flask_app/templates/bastion_ssh_config.j2 b/roles/deploy_flask_app/templates/bastion_ssh_config.j2 new file mode 100644 index 00000000..ec4c2e10 --- /dev/null +++ b/roles/deploy_flask_app/templates/bastion_ssh_config.j2 @@ -0,0 +1,8 @@ +{% for item in deploy_flask_app_vms.instances %} +Host {{ item.instance_id }} + User {{ deploy_flask_app_workers_user_name }} + HostName {{ item.private_ip_address }} + IdentityFile {{ deploy_flask_app_workers_ssh_private_key }} + StrictHostKeyChecking no + UserKnownHostsFile /dev/null +{% endfor %} \ No newline at end of file diff --git a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 new file mode 100644 index 00000000..543890cc --- /dev/null +++ b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 @@ -0,0 +1,49 @@ +--- +- name: Run app + hosts: all + gather_facts: false + strategy: free + become: true + + tasks: + - name: Update ssh_config to increase ssh session lifetime + ansible.builtin.blockinfile: + path: /etc/ssh/sshd_config + block: | + ClientAliveInterval 1200 + ClientAliveCountMax 3 + + - name: Install Podman + ansible.builtin.yum: + name: + - podman + update_cache: True + state: present + + - name: Check running container + ansible.builtin.shell: + cmd: "podman container ps -a -f name=webapp-container-1 --format=.Names" + register: container + changed_when: false + + - name: Run application instance + 
ansible.builtin.shell: + cmd: >- + podman run + --rm + -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" + -e FLASK_ENV="{{ deploy_flask_app_config.env }}" + -e DATABASE_HOST="{{ deploy_flask_app__rds_host }}" + -e DATABASE_INSTANCE="{{ deploy_flask_app__rds_dbname }}" + -e DATABASE_USER="{{ deploy_flask_app_rds_master_username }}" + -e DATABASE_PASSWORD="{{ deploy_flask_app_rds_master_password }}" + -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" + -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" + -e WORKER_HOSTNAME='{{ deploy_flask_app_worker_hostname }}' + -e WORKERS_HOSTS="{{ deploy_flask_app_instances_list }}" + -p 5000:5000 + --name webapp-container-1 + -d {{ deploy_flask_app_container_image }} + when: + - container.stdout == "" + changed_when: true \ No newline at end of file diff --git a/roles/deploy_flask_app/templates/local_ssh_config.j2 b/roles/deploy_flask_app/templates/local_ssh_config.j2 new file mode 100644 index 00000000..6395d992 --- /dev/null +++ b/roles/deploy_flask_app/templates/local_ssh_config.j2 @@ -0,0 +1,6 @@ +Host bastion + HostName {{ deploy_flask_app__bastion_public_ip }} + User {{ deploy_flask_app_bastion_host_username }} + IdentityFile {{ deploy_flask_app_bastion_ssh_private_key }} + StrictHostKeyChecking no + UserKnownHostsFile /dev/null \ No newline at end of file diff --git a/roles/deploy_flask_app/templates/ssh_config.j2 b/roles/deploy_flask_app/templates/ssh_config.j2 deleted file mode 100644 index 28e8a98f..00000000 --- a/roles/deploy_flask_app/templates/ssh_config.j2 +++ /dev/null @@ -1,15 +0,0 @@ -Host bastion - HostName {{ deploy_flask_app__bastion_public_ip }} - User {{ deploy_flask_app_bastion_host_username }} - IdentityFile {{ deploy_flask_app_bastion_ssh_private_key }} - StrictHostKeyChecking no - UserKnownHostsFile /dev/null - -{% for item in deploy_flask_app_vms.instances %} -Host {{ item.instance_id }} - User {{ deploy_flask_app_workers_user_name }} - HostName {{ item.private_ip_address }} - 
StrictHostKeyChecking no - UserKnownHostsFile /dev/null - ProxyCommand ssh -T -q -o 'ForwardAgent yes' bastion 'ssh-add {{ deploy_flask_app_workers_ssh_private_key }} && nc %h %p' -{% endfor %} \ No newline at end of file diff --git a/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 b/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 new file mode 100644 index 00000000..40219ae8 --- /dev/null +++ b/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 @@ -0,0 +1,6 @@ +all: + hosts: +{% for item in deploy_flask_app_vms.instances %} + {{ item.instance_id }}: + ansible_python_interpreter: auto +{% endfor %} \ No newline at end of file diff --git a/tests/integration/targets/setup_rsa_keys/defaults/main.yml b/tests/integration/targets/setup_rsa_keys/defaults/main.yml new file mode 100644 index 00000000..7688fdce --- /dev/null +++ b/tests/integration/targets/setup_rsa_keys/defaults/main.yml @@ -0,0 +1,2 @@ +--- +setup_rsa_keys__path: "~/.ssh-{{ resource_prefix }}" diff --git a/tests/integration/targets/setup_rsa_keys/tasks/main.yml b/tests/integration/targets/setup_rsa_keys/tasks/main.yml index 1d0c94b8..23c16c8a 100644 --- a/tests/integration/targets/setup_rsa_keys/tasks/main.yml +++ b/tests/integration/targets/setup_rsa_keys/tasks/main.yml @@ -1,16 +1,25 @@ --- -- name: Create temporary directory to generate keys - ansible.builtin.tempfile: - state: directory - suffix: ssh - register: setup_rsa_keys__tmpdir - notify: 'Delete temporary RSA key directory' - -- name: Generate RSA keys - community.crypto.openssh_keypair: - path: "{{ setup_rsa_keys__tmpdir.path }}/id_rsa" - - name: Define path to private and public keys ansible.builtin.set_fact: - setup_rsa_keys__public_key_file: "{{ setup_rsa_keys__tmpdir.path }}/id_rsa.pub" - setup_rsa_keys__private_key_file: "{{ setup_rsa_keys__tmpdir.path }}/id_rsa" + setup_rsa_keys__public_key_file: "{{ setup_rsa_keys__path }}/id_rsa.pub" + setup_rsa_keys__private_key_file: "{{ setup_rsa_keys__path }}/id_rsa" + 
+- name: Check if ssh directory exists + ansible.builtin.stat: + path: "{{ item }}" + register: stats + with_items: + - "{{ setup_rsa_keys__public_key_file }}" + - "{{ setup_rsa_keys__private_key_file }}" + +- name: Generate RSA keys file + when: stats.results | selectattr('stat.exists', 'equalto', false) | list | length > 0 + block: + - name: Create directory to generate keys in + ansible.builtin.file: + path: "{{ setup_rsa_keys__path }}" + state: directory + + - name: Generate RSA keys + community.crypto.openssh_keypair: + path: "{{ setup_rsa_keys__path }}/id_rsa" diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index 02000abc..de2fdf2f 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,3 +1,3 @@ -!cloud/aws +cloud/aws role/deploy_flask_app time=35m diff --git a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml index 67ac7d17..0e6574a5 100644 --- a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml @@ -1,3 +1 @@ aws_security_token: '{{ security_token | default(omit) }}' -aws_region: eu-west-2 -resource_prefix: "asnbible-test-user-data-20231221" \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/handlers/main.yml b/tests/integration/targets/test_deploy_flask_app/handlers/main.yml deleted file mode 100644 index bbbbd13d..00000000 --- a/tests/integration/targets/test_deploy_flask_app/handlers/main.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -- name: Delete temporary key pair file - ansible.builtin.file: - state: absent - path: "{{ test_deploy_flask_app__sshkey.path }}" - when: test_deploy_flask_app__sshkey is defined diff --git a/tests/integration/targets/test_deploy_flask_app/meta/main.yml 
b/tests/integration/targets/test_deploy_flask_app/meta/main.yml new file mode 100644 index 00000000..3d8b9c14 --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: setup_rsa_keys diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index 9ff20a6a..6f898161 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -166,17 +166,10 @@ db_instance_identifier: "{{ rds_identifier }}" register: rds_result - - name: Create temporary file for ssh private key - ansible.builtin.tempfile: - suffix: .id_rsa - register: test_deploy_flask_app__sshkey - # notify: 'Delete temporary key pair file' - - name: Create key pair to connect to the VM amazon.aws.ec2_key: name: "{{ deploy_flask_app_sshkey_pair_name }}" - file_name: "{{ test_deploy_flask_app__sshkey.path }}" - register: rsa_key + key_material: "{{ lookup('file', setup_rsa_keys__public_key_file) }}" - name: Ensure IAM instance role exists amazon.aws.iam_role: @@ -204,7 +197,7 @@ - "{{ secgroup.group_id }}" user_data: | #!/bin/bash - yum install -y python3 python-virtualenv sshpass netcat + yum install -y python3 python-virtualenv sshpass netcat ansible wait: true state: started register: vm_result diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml index 06be0a38..bc6f35b9 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml @@ -2,12 +2,10 @@ - name: "Run deploy_flask_app integration tests" module_defaults: group/aws: - # aws_access_key: "{{ aws_access_key }}" - # aws_secret_key: "{{ aws_secret_key }}" - # security_token: "{{ aws_security_token }}" - # region: "{{ aws_region }}" - 
aws_profile: eu_london - region: eu-west-2 + aws_access_key: "{{ aws_access_key }}" + aws_secret_key: "{{ aws_secret_key }}" + security_token: "{{ aws_security_token }}" + region: "{{ aws_region }}" block: - name: Run operation create @@ -21,8 +19,7 @@ deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" deploy_flask_app_rds_info: "{{ rds_result }}" - # deploy_flask_app_bastion_ssh_private_key: "{{ test_deploy_flask_app__sshkey.path }}" - deploy_flask_app_bastion_ssh_private_key: /tmp/ansible.db0gt4_6.id_rsa + deploy_flask_app_bastion_ssh_private_key: "{{ setup_rsa_keys__private_key_file }}" - name: Check that a page returns successfully ansible.builtin.uri: @@ -32,7 +29,7 @@ retries: 5 delay: 10 - # always: - # # Cleanup after ourselves - # - name: Cleanup - # ansible.builtin.include_tasks: "delete.yaml" + always: + # Cleanup after ourselves + - name: Cleanup + ansible.builtin.include_tasks: "delete.yaml" diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml index 72c0b8d2..69c8a0db 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/vars/main.yaml @@ -22,7 +22,7 @@ rds_engine: postgres rds_engine_version: "14.8" bastion_host_type: t3.micro bastion_host_venv_path: ~/env -image_filter: Fedora-Cloud-Base-37-* +image_filter: Fedora-Cloud-Base-38-* bastion_host_iam_role: "{{ resource_prefix }}-role" # vars for the deploy_flask_app role and create task From 91f82fb0964b17f518204eefc6d5fa7049b945a0 Mon Sep 17 00:00:00 2001 From: abikouo Date: Mon, 8 Jan 2024 16:58:39 +0100 Subject: [PATCH 23/51] Replace ansible.builtin.shell by ansible.builtin.command --- roles/deploy_flask_app/meta/main.yaml | 6 +++--- roles/deploy_flask_app/tasks/start_containers.yaml | 12 +++++------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/roles/deploy_flask_app/meta/main.yaml 
b/roles/deploy_flask_app/meta/main.yaml index 975d38ef..3bf1568b 100644 --- a/roles/deploy_flask_app/meta/main.yaml +++ b/roles/deploy_flask_app/meta/main.yaml @@ -1,4 +1,4 @@ --- -# dependencies: -# - role: cloud.aws_ops.aws_setup_credentials -# allow_duplicates: true +dependencies: + - role: cloud.aws_ops.aws_setup_credentials +allow_duplicates: true diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml b/roles/deploy_flask_app/tasks/start_containers.yaml index 359885fc..1988f83c 100644 --- a/roles/deploy_flask_app/tasks/start_containers.yaml +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -74,10 +74,8 @@ deploy_flask_app_worker_hostname: "{{ '{{' }} inventory_hostname {{ '}}' }}" - name: Deploy application into workers - ansible.builtin.shell: - cmd: >- - ansible-playbook - -i {{ deploy_flask_app_workers_inventory_file }} - {{ deploy_flask_app_workers_playbook_file }} - -v - changed_when: false + ansible.builtin.command: >- + ansible-playbook + --inventory {{ deploy_flask_app_workers_inventory_file }} + {{ deploy_flask_app_workers_playbook_file }} + -v From 4ad3c540fa24cbcbe2d563fe5e9731a7724b1448 Mon Sep 17 00:00:00 2001 From: abikouo Date: Mon, 8 Jan 2024 18:05:39 +0100 Subject: [PATCH 24/51] fix: playbook webapp - delete resources --- playbooks/webapp/tasks/create.yaml | 12 +- playbooks/webapp/tasks/delete.yaml | 107 ++++++++---------- playbooks/webapp/vars/main.yaml | 6 +- roles/deploy_flask_app/tasks/main.yaml | 4 + .../targets/setup_rsa_keys/handlers/main.yml | 4 +- .../targets/setup_rsa_keys/tasks/main.yml | 1 + .../targets/test_deploy_flask_app/aliases | 1 + .../test_deploy_flask_app/tasks/delete.yaml | 2 +- .../targets/test_playbook_webapp/aliases | 2 + .../create_aws_credentials.yml | 13 +++ .../targets/test_playbook_webapp/runme.sh | 23 ++++ .../test_playbook_webapp/test_webapp.yaml | 1 + 12 files changed, 99 insertions(+), 77 deletions(-) create mode 100644 tests/integration/targets/test_playbook_webapp/aliases create mode 
100644 tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml create mode 100755 tests/integration/targets/test_playbook_webapp/runme.sh create mode 100644 tests/integration/targets/test_playbook_webapp/test_webapp.yaml diff --git a/playbooks/webapp/tasks/create.yaml b/playbooks/webapp/tasks/create.yaml index ee870b65..76d6e9e3 100644 --- a/playbooks/webapp/tasks/create.yaml +++ b/playbooks/webapp/tasks/create.yaml @@ -218,15 +218,6 @@ mode: 0400 when: rsa_key is changed - - name: Check if the vm exists - amazon.aws.ec2_instance_info: - filters: - instance-type: "{{ bastion_host_type }}" - key-name: "{{ deploy_flask_app_sshkey_pair_name }}" - vpc-id: "{{ vpc.vpc.id }}" - instance-state-name: running - register: vm_result - - name: Ensure IAM instance role exists amazon.aws.iam_role: name: "{{ ec2_iam_role_name }}" @@ -237,7 +228,6 @@ register: role_output - name: Create a virtual machine - when: vm_result.instances | length == 0 amazon.aws.ec2_instance: name: "{{ deploy_flask_app_bastion_host_name }}" instance_type: "{{ bastion_host_type }}" @@ -254,7 +244,7 @@ - "{{ secgroup.group_id }}" user_data: | #!/bin/bash - yum install -y python3 python-virtualenv sshpass netcat + yum install -y python3 python-virtualenv sshpass netcat ansible wait: true state: started register: vm_result diff --git a/playbooks/webapp/tasks/delete.yaml b/playbooks/webapp/tasks/delete.yaml index 1ae28294..929dace8 100644 --- a/playbooks/webapp/tasks/delete.yaml +++ b/playbooks/webapp/tasks/delete.yaml @@ -22,54 +22,33 @@ ansible.builtin.set_fact: vpc_id: "{{ vpc.vpcs.0.vpc_id }}" - - name: Get bastion instance info + # Delete Load balancer + - name: List Load balancer(s) from VPC + community.aws.elb_classic_lb_info: + register: load_balancers + + - name: Delete load balancer(s) + amazon.aws.elb_classic_lb: + name: "{{ item }}" + wait: true + state: absent + with_items: "{{ load_balancers.elbs | selectattr('vpc_id', 'equalto', vpc_id) | map(attribute='load_balancer_name') | 
list }}" + + # Delete EC2 instances + - name: Get EC2 instance info amazon.aws.ec2_instance_info: filters: - instance-type: "{{ bastion_host_type }}" - key-name: "{{ deploy_flask_app_sshkey_pair_name }}" vpc-id: "{{ vpc_id }}" - instance-state-name: running - register: bastion - - - name: Delete EC2 instances with dependant Resources - when: bastion.instances | length == 1 - block: - - name: Set 'instance_host_name' variable - ansible.builtin.set_fact: - instance_host_name: "{{ bastion.instances.0.public_dns_name | split('.') | first }}" - - - name: Delete workers key pair - amazon.aws.ec2_key: - name: "{{ instance_host_name }}-key" - state: absent - - - name: Delete load balancer - amazon.aws.elb_classic_lb: - name: "{{ instance_host_name }}-lb" - wait: true - state: absent - - - name: List workers - amazon.aws.ec2_instance_info: - filters: - tag:Name: "{{ instance_host_name }}-workers" - instance-state-name: running - register: running - - - name: Delete workers - when: running.instances | length != 0 - amazon.aws.ec2_instance: - instance_ids: "{{ running.instances | map(attribute='instance_id') | list }}" - wait: true - state: terminated - - - name: Delete bastion host - amazon.aws.ec2_instance: - instance_ids: - - "{{ bastion.instances.0.instance_id }}" - wait: true - state: terminated + register: ec2_instances + + - name: Delete ec2 instances from VPC + amazon.aws.ec2_instance: + instance_ids: "{{ ec2_instances.instances | map(attribute='instance_id') | list }}" + wait: true + state: terminated + when: ec2_instances.instances | length > 0 + # Delete RDS instance - name: Delete RDS instance amazon.aws.rds_instance: state: absent @@ -87,19 +66,7 @@ name: "{{ rds_subnet_group_name }}" state: absent - - name: List Security group from VPC - amazon.aws.ec2_security_group_info: - filters: - vpc-id: "{{ vpc_id }}" - tag:prefix: "{{ resource_prefix }}" - register: secgroups - - - name: Delete security groups - amazon.aws.ec2_security_group: - state: absent - group_id: 
"{{ item }}" - with_items: "{{ secgroups.security_groups | map(attribute='group_id') | list }}" - + # Delete VPC route table - name: List routes table from VPC amazon.aws.ec2_vpc_route_table_info: filters: @@ -115,6 +82,7 @@ state: absent with_items: "{{ route_table.route_tables | map(attribute='id') | list }}" + # Delete NAT Gateway - name: Get NAT gateway amazon.aws.ec2_vpc_nat_gateway_info: filters: @@ -128,20 +96,39 @@ wait: true with_items: "{{ nat_gw.result | map(attribute='nat_gateway_id') | list }}" + # Delete Internet gateway - name: Delete internet gateway amazon.aws.ec2_vpc_igw: vpc_id: "{{ vpc_id }}" state: absent + # Delete Subnets + - name: List Subnets from VPC + amazon.aws.ec2_vpc_subnet_info: + filters: + vpc-id: "{{ vpc_id }}" + register: vpc_subnets + - name: Delete subnets amazon.aws.ec2_vpc_subnet: cidr: "{{ item }}" state: absent vpc_id: "{{ vpc_id }}" - with_items: "{{ subnet_cidr }}" + with_items: "{{ vpc_subnets.subnets | map(attribute='cidr_block') | list }}" + + # Delete Security groups + - name: List Security group from VPC + amazon.aws.ec2_security_group_info: + filters: + vpc-id: "{{ vpc_id }}" + register: secgroups + + - name: Delete security groups + amazon.aws.ec2_security_group: + state: absent + group_id: "{{ item }}" + with_items: "{{ secgroups.security_groups | rejectattr('group_name', 'equalto', 'default') | map(attribute='group_id') | list }}" - # As ec2_vpc_route_table can't delete route table, the vpc still has dependencies and cannot be deleted. - # You need to do it delete it manually using either the console or the cli. 
- name: Delete VPC amazon.aws.ec2_vpc_net: name: "{{ vpc_name }}" diff --git a/playbooks/webapp/vars/main.yaml b/playbooks/webapp/vars/main.yaml index a0de9c64..d4a7fbc9 100644 --- a/playbooks/webapp/vars/main.yaml +++ b/playbooks/webapp/vars/main.yaml @@ -12,7 +12,7 @@ resource_tags: prefix: "{{ resource_prefix }}" operation: create -image_filter: Fedora-Cloud-Base-35-*gp2-0 +image_filter: Fedora-Cloud-Base-38-* public_secgroup_name: "{{ resource_prefix }}-sg" ec2_iam_role_name: "{{ resource_prefix }}-role" rds_subnet_group_name: "{{ resource_prefix }}-rds-sg" @@ -23,7 +23,7 @@ rds_instance_class: db.m6g.large rds_instance_name: mysampledb123 rds_engine: postgres rds_engine_version: "14.8" -bastion_host_type: t2.xlarge +bastion_host_type: t3.micro bastion_host_venv_path: ~/env rds_listening_port: 5432 @@ -31,7 +31,7 @@ rds_listening_port: 5432 deploy_flask_app_sshkey_pair_name: "{{ resource_prefix }}-key" deploy_flask_app_bastion_host_name: "{{ resource_prefix }}-bastion" deploy_flask_app_bastion_host_username: fedora -deploy_flask_app_workers_instance_type: t2.xlarge +deploy_flask_app_workers_instance_type: t3.micro deploy_flask_app_workers_user_name: fedora deploy_flask_app_number_of_workers: 2 deploy_flask_app_listening_port: 5000 diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 93a2073a..c68cfbf7 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -18,3 +18,7 @@ - name: Start application container into workers ansible.builtin.include_tasks: start_containers.yaml + + - name: Display application URL + ansible.builtin.debug: + msg: "Application accessible at http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" diff --git a/tests/integration/targets/setup_rsa_keys/handlers/main.yml b/tests/integration/targets/setup_rsa_keys/handlers/main.yml index 03ea4269..04fcf9aa 100644 --- 
a/tests/integration/targets/setup_rsa_keys/handlers/main.yml +++ b/tests/integration/targets/setup_rsa_keys/handlers/main.yml @@ -1,6 +1,6 @@ --- -- name: Delete temporary RSA key directory +- name: Delete RSA key directory ansible.builtin.file: state: absent - path: "{{ setup_rsa_keys__tmpdir }}" + path: "{{ setup_rsa_keys__path }}" ignore_errors: true diff --git a/tests/integration/targets/setup_rsa_keys/tasks/main.yml b/tests/integration/targets/setup_rsa_keys/tasks/main.yml index 23c16c8a..8ef0edfe 100644 --- a/tests/integration/targets/setup_rsa_keys/tasks/main.yml +++ b/tests/integration/targets/setup_rsa_keys/tasks/main.yml @@ -19,6 +19,7 @@ ansible.builtin.file: path: "{{ setup_rsa_keys__path }}" state: directory + notify: 'Delete RSA key directory' - name: Generate RSA keys community.crypto.openssh_keypair: diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index de2fdf2f..c63f9525 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,3 +1,4 @@ cloud/aws role/deploy_flask_app time=35m +unstable diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml index c4f27422..cd99f8e0 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml @@ -76,7 +76,7 @@ state: absent with_items: "{{ route_table.route_tables | map(attribute='id') | list }}" - # Delete VPC route table + # Delete NAT Gateway - name: Get NAT gateway amazon.aws.ec2_vpc_nat_gateway_info: filters: diff --git a/tests/integration/targets/test_playbook_webapp/aliases b/tests/integration/targets/test_playbook_webapp/aliases new file mode 100644 index 00000000..87ff2914 --- /dev/null +++ b/tests/integration/targets/test_playbook_webapp/aliases @@ -0,0 +1,2 @@ +time=35m 
+cloud/aws diff --git a/tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml b/tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml new file mode 100644 index 00000000..ba5841b1 --- /dev/null +++ b/tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml @@ -0,0 +1,13 @@ +--- +- hosts: localhost + connection: local + gather_facts: false + tasks: + - name: Write access key to file we can source + ansible.builtin.copy: + dest: access_key.sh + content: | + export AWS_ACCESS_KEY_ID="{{ aws_access_key }}" + export AWS_SECRET_ACCESS_KEY="{{ aws_secret_key }}" + export AWS_REGION="{{ aws_region }}" + export AWS_SECURITY_TOKEN="{{ aws_security_token }}" diff --git a/tests/integration/targets/test_playbook_webapp/runme.sh b/tests/integration/targets/test_playbook_webapp/runme.sh new file mode 100755 index 00000000..cf7f3c7f --- /dev/null +++ b/tests/integration/targets/test_playbook_webapp/runme.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +# generate inventory with access_key provided through a templated variable +ansible-playbook create_aws_credentials.yml "$@" +source access_key.sh + +set -eux + +function cleanup() { + set +x + source access_key.sh + set -x + ansible-playbook webapp.yaml -e "operation=delete" "$@" + exit 1 +} + +trap 'cleanup "${@}"' ERR + +# Create web application +ansible-playbook webapp.yaml "$@" + +# Delete web application +ansible-playbook webapp.yaml -e "operation=delete" "$@" \ No newline at end of file diff --git a/tests/integration/targets/test_playbook_webapp/test_webapp.yaml b/tests/integration/targets/test_playbook_webapp/test_webapp.yaml new file mode 100644 index 00000000..09ef0ea3 --- /dev/null +++ b/tests/integration/targets/test_playbook_webapp/test_webapp.yaml @@ -0,0 +1 @@ +- import_playbook: cloud.aws_ops.webapp.webapp From 37805f42e7e91c53315916e719245ce78099075b Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 9 Jan 2024 12:25:34 +0100 Subject: [PATCH 25/51] Remove dependency 
with aws_setup_credentials --- roles/deploy_flask_app/meta/main.yaml | 4 ---- roles/deploy_flask_app/tasks/main.yaml | 6 +++++- 2 files changed, 5 insertions(+), 5 deletions(-) delete mode 100644 roles/deploy_flask_app/meta/main.yaml diff --git a/roles/deploy_flask_app/meta/main.yaml b/roles/deploy_flask_app/meta/main.yaml deleted file mode 100644 index 3bf1568b..00000000 --- a/roles/deploy_flask_app/meta/main.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -dependencies: - - role: cloud.aws_ops.aws_setup_credentials -allow_duplicates: true diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index c68cfbf7..6b74ac11 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -1,7 +1,11 @@ --- - name: Deploy flask app. module_defaults: - group/aws: "{{ aws_setup_credentials__output }}" + group/aws: + aws_access_key: "{{ aws_access_key | default(omit) }}" + aws_secret_key: "{{ aws_secret_key | default(omit) }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ region | default(aws_region) }}" vars: deploy_flask_app__resource_prefix: "{{ deploy_flask_app_vm_info.instances.0.public_dns_name | split('.') | first }}" deploy_flask_app__group_id: "{{ deploy_flask_app_vm_info.instances.0.security_groups[0].group_id }}" From b45004cdd555a12f507b59172a3e6d18d7439399 Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 9 Jan 2024 12:34:17 +0100 Subject: [PATCH 26/51] Remove unused playbook --- .../test_playbook_webapp/create_aws_credentials.yml | 13 ------------- .../targets/test_playbook_webapp/runme.sh | 13 +++---------- 2 files changed, 3 insertions(+), 23 deletions(-) delete mode 100644 tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml diff --git a/tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml b/tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml deleted file mode 100644 index ba5841b1..00000000 --- 
a/tests/integration/targets/test_playbook_webapp/create_aws_credentials.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- -- hosts: localhost - connection: local - gather_facts: false - tasks: - - name: Write access key to file we can source - ansible.builtin.copy: - dest: access_key.sh - content: | - export AWS_ACCESS_KEY_ID="{{ aws_access_key }}" - export AWS_SECRET_ACCESS_KEY="{{ aws_secret_key }}" - export AWS_REGION="{{ aws_region }}" - export AWS_SECURITY_TOKEN="{{ aws_security_token }}" diff --git a/tests/integration/targets/test_playbook_webapp/runme.sh b/tests/integration/targets/test_playbook_webapp/runme.sh index cf7f3c7f..fbb29492 100755 --- a/tests/integration/targets/test_playbook_webapp/runme.sh +++ b/tests/integration/targets/test_playbook_webapp/runme.sh @@ -1,23 +1,16 @@ #!/usr/bin/env bash -# generate inventory with access_key provided through a templated variable -ansible-playbook create_aws_credentials.yml "$@" -source access_key.sh - set -eux function cleanup() { - set +x - source access_key.sh - set -x - ansible-playbook webapp.yaml -e "operation=delete" "$@" + ansible-playbook test_webapp.yaml -e "operation=delete" "$@" exit 1 } trap 'cleanup "${@}"' ERR # Create web application -ansible-playbook webapp.yaml "$@" +ansible-playbook test_webapp.yaml "$@" # Delete web application -ansible-playbook webapp.yaml -e "operation=delete" "$@" \ No newline at end of file +ansible-playbook test_webapp.yaml -e "operation=delete" "$@" \ No newline at end of file From 09573218eb831d0d80c027aa9da26ac16c73e3c4 Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 9 Jan 2024 14:44:09 +0100 Subject: [PATCH 27/51] add retries when trying to connect to ec2 instances --- .../tasks/start_containers.yaml | 24 +++++++++++++++++++ .../templates/deploy_app.yaml.j2 | 18 +++++++++++++- .../targets/test_deploy_flask_app/aliases | 1 - 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml 
b/roles/deploy_flask_app/tasks/start_containers.yaml index 1988f83c..ee875a66 100644 --- a/roles/deploy_flask_app/tasks/start_containers.yaml +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -44,6 +44,10 @@ path: "~/.ssh" - state: touch path: "~/.ssh/config" + retries: 5 + delay: 5 + until: result is successful + register: result - name: Update local .ssh/config ansible.builtin.blockinfile: @@ -51,18 +55,30 @@ insertafter: EOF dest: "~/.ssh/config" content: "{{ lookup('template', 'bastion_ssh_config.j2') }}" + retries: 5 + delay: 5 + until: result is successful + register: result - name: Copy remote ssh private key file into bastion ansible.builtin.copy: src: "{{ deploy_flask_app_bastion_ssh_private_key }}" dest: "{{ deploy_flask_app_workers_ssh_private_key }}" mode: 0400 + retries: 5 + delay: 5 + until: result is successful + register: result - name: Generate workers inventory file ansible.builtin.copy: content: "{{ lookup('template', 'workers_inventory.yaml.j2') }}" dest: "{{ deploy_flask_app_workers_inventory_file }}" mode: 0755 + retries: 5 + delay: 5 + until: result is successful + register: result - name: Generate playbook to deploy application ansible.builtin.copy: @@ -72,6 +88,10 @@ vars: deploy_flask_app_instances_list: "{{ deploy_flask_app_instances | join(',') }}" deploy_flask_app_worker_hostname: "{{ '{{' }} inventory_hostname {{ '}}' }}" + retries: 5 + delay: 5 + until: result is successful + register: result - name: Deploy application into workers ansible.builtin.command: >- @@ -79,3 +99,7 @@ --inventory {{ deploy_flask_app_workers_inventory_file }} {{ deploy_flask_app_workers_playbook_file }} -v + retries: 5 + delay: 5 + until: result is successful + register: result diff --git a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 index 543890cc..7c39f042 100644 --- a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 +++ b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 @@ -12,6 
+12,10 @@ block: | ClientAliveInterval 1200 ClientAliveCountMax 3 + retries: 5 + delay: 5 + until: result is successful + register: result - name: Install Podman ansible.builtin.yum: @@ -19,12 +23,20 @@ - podman update_cache: True state: present + retries: 5 + delay: 5 + until: result is successful + register: result - name: Check running container ansible.builtin.shell: cmd: "podman container ps -a -f name=webapp-container-1 --format=.Names" register: container changed_when: false + retries: 5 + delay: 5 + until: result is successful + register: result - name: Run application instance ansible.builtin.shell: @@ -46,4 +58,8 @@ -d {{ deploy_flask_app_container_image }} when: - container.stdout == "" - changed_when: true \ No newline at end of file + changed_when: true + retries: 5 + delay: 5 + until: result is successful + register: result \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/aliases b/tests/integration/targets/test_deploy_flask_app/aliases index c63f9525..de2fdf2f 100644 --- a/tests/integration/targets/test_deploy_flask_app/aliases +++ b/tests/integration/targets/test_deploy_flask_app/aliases @@ -1,4 +1,3 @@ cloud/aws role/deploy_flask_app time=35m -unstable From 136d1366c9a9832a0601cc012a29f60ceae3932c Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 9 Jan 2024 17:49:13 +0100 Subject: [PATCH 28/51] minor updates --- roles/deploy_flask_app/tasks/setup_infra.yaml | 7 +-- .../tasks/start_containers.yaml | 7 +++ .../templates/deploy_app.yaml.j2 | 20 +------ .../test_deploy_flask_app/tasks/create.yaml | 56 +++++++++++-------- 4 files changed, 44 insertions(+), 46 deletions(-) diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 388f2640..81fe2d60 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -70,9 +70,4 @@ subnets: - "{{ deploy_flask_app__public_subnet_id }}" scheme: internet-facing - wait: 
true - wait_timeout: 360 - retries: 5 - delay: 10 - until: deploy_flask_app_lb_result is successful - register: deploy_flask_app_lb_result + wait: false diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml b/roles/deploy_flask_app/tasks/start_containers.yaml index ee875a66..8afcc369 100644 --- a/roles/deploy_flask_app/tasks/start_containers.yaml +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -103,3 +103,10 @@ delay: 5 until: result is successful register: result + + - name: "Ensure application is reachable on worker {{ worker.instance_id }}" + ansible.builtin.uri: + url: "http://{{ worker.private_ip_address }}:{{ deploy_flask_app_listening_port }}" + with_items: "{{ deploy_flask_app_vms.instances }}" + loop_control: + loop_var: worker diff --git a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 index 7c39f042..357c13b2 100644 --- a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 +++ b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 @@ -12,31 +12,19 @@ block: | ClientAliveInterval 1200 ClientAliveCountMax 3 - retries: 5 - delay: 5 - until: result is successful - register: result - name: Install Podman ansible.builtin.yum: name: - podman - update_cache: True + update_cache: false state: present - retries: 5 - delay: 5 - until: result is successful - register: result - name: Check running container ansible.builtin.shell: cmd: "podman container ps -a -f name=webapp-container-1 --format=.Names" register: container changed_when: false - retries: 5 - delay: 5 - until: result is successful - register: result - name: Run application instance ansible.builtin.shell: @@ -58,8 +46,4 @@ -d {{ deploy_flask_app_container_image }} when: - container.stdout == "" - changed_when: true - retries: 5 - delay: 5 - until: result is successful - register: result \ No newline at end of file + changed_when: true \ No newline at end of file diff --git 
a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml index 6f898161..e6096492 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml @@ -74,9 +74,8 @@ amazon.aws.ec2_vpc_nat_gateway: subnet_id: "{{ subnet.subnet.id }}" if_exist_do_not_create: true - wait: true + wait: false # Long running tasks, the availability will be checked later state: present - register: nat_gw - name: Create Route table for internet gateway (public subnet) amazon.aws.ec2_vpc_route_table: @@ -92,20 +91,6 @@ route: internet state: present - - name: Create Route table for NAT gateway (private subnet) - amazon.aws.ec2_vpc_route_table: - vpc_id: "{{ vpc.vpc.id }}" - subnets: - - "{{ private_subnet.subnet.id }}" - routes: - - dest: 0.0.0.0/0 - gateway_id: "{{ nat_gw.nat_gateway_id }}" - lookup: tag - resource_tags: - subnet: private - route: nat-gateway - state: present - - name: Create security group for bastion amazon.aws.ec2_security_group: name: "{{ public_secgroup_name }}" @@ -144,7 +129,6 @@ - name: Create RDS instance (PostGreSQL Database) amazon.aws.rds_instance: force_update_password: true - wait: true allocated_storage: "{{ rds_allocated_storage_gb }}" backup_retention_period: 0 db_instance_class: "{{ rds_instance_class }}" @@ -160,11 +144,7 @@ db_subnet_group_name: "{{ rds_subnet_group_name }}" vpc_security_group_ids: - "{{ rds_sg.group_id }}" - - - name: Get RDS instance info - amazon.aws.rds_instance_info: - db_instance_identifier: "{{ rds_identifier }}" - register: rds_result + wait: false - name: Create key pair to connect to the VM amazon.aws.ec2_key: @@ -201,3 +181,35 @@ wait: true state: started register: vm_result + + - name: Wait for the NAT gateway to be available + amazon.aws.ec2_vpc_nat_gateway_info: + filters: + subnet-id: "{{ subnet.subnet.id }}" + state: "available" + register: nat_gateway + 
retries: 60 + delay: 5 + until: nat_gateway.result | length > 0 + + - name: Create Route table for NAT gateway (private subnet) + amazon.aws.ec2_vpc_route_table: + vpc_id: "{{ vpc.vpc.id }}" + subnets: + - "{{ private_subnet.subnet.id }}" + routes: + - dest: 0.0.0.0/0 + gateway_id: "{{ nat_gateway.result.0.nat_gateway_id }}" + lookup: tag + resource_tags: + subnet: private + route: nat-gateway + state: present + + - name: Wait for the RDS instance to be available + amazon.aws.rds_instance_info: + db_instance_identifier: "{{ rds_identifier }}" + retries: 60 + delay: 5 + until: rds_result.instances.0.db_instance_status == "available" + register: rds_result From 8731e94d575204d3028ba052de84caf7384c7b2d Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 9 Jan 2024 18:29:40 +0100 Subject: [PATCH 29/51] Remove all conditional checks --- roles/deploy_flask_app/tasks/setup_infra.yaml | 2 +- .../tasks/start_containers.yaml | 31 ------------------- .../targets/test_playbook_webapp/aliases | 1 + 3 files changed, 2 insertions(+), 32 deletions(-) diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 81fe2d60..6a9ad341 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -70,4 +70,4 @@ subnets: - "{{ deploy_flask_app__public_subnet_id }}" scheme: internet-facing - wait: false + wait: true diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml b/roles/deploy_flask_app/tasks/start_containers.yaml index 8afcc369..1988f83c 100644 --- a/roles/deploy_flask_app/tasks/start_containers.yaml +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -44,10 +44,6 @@ path: "~/.ssh" - state: touch path: "~/.ssh/config" - retries: 5 - delay: 5 - until: result is successful - register: result - name: Update local .ssh/config ansible.builtin.blockinfile: @@ -55,30 +51,18 @@ insertafter: EOF dest: "~/.ssh/config" content: "{{ lookup('template', 
'bastion_ssh_config.j2') }}" - retries: 5 - delay: 5 - until: result is successful - register: result - name: Copy remote ssh private key file into bastion ansible.builtin.copy: src: "{{ deploy_flask_app_bastion_ssh_private_key }}" dest: "{{ deploy_flask_app_workers_ssh_private_key }}" mode: 0400 - retries: 5 - delay: 5 - until: result is successful - register: result - name: Generate workers inventory file ansible.builtin.copy: content: "{{ lookup('template', 'workers_inventory.yaml.j2') }}" dest: "{{ deploy_flask_app_workers_inventory_file }}" mode: 0755 - retries: 5 - delay: 5 - until: result is successful - register: result - name: Generate playbook to deploy application ansible.builtin.copy: @@ -88,10 +72,6 @@ vars: deploy_flask_app_instances_list: "{{ deploy_flask_app_instances | join(',') }}" deploy_flask_app_worker_hostname: "{{ '{{' }} inventory_hostname {{ '}}' }}" - retries: 5 - delay: 5 - until: result is successful - register: result - name: Deploy application into workers ansible.builtin.command: >- @@ -99,14 +79,3 @@ --inventory {{ deploy_flask_app_workers_inventory_file }} {{ deploy_flask_app_workers_playbook_file }} -v - retries: 5 - delay: 5 - until: result is successful - register: result - - - name: "Ensure application is reachable on worker {{ worker.instance_id }}" - ansible.builtin.uri: - url: "http://{{ worker.private_ip_address }}:{{ deploy_flask_app_listening_port }}" - with_items: "{{ deploy_flask_app_vms.instances }}" - loop_control: - loop_var: worker diff --git a/tests/integration/targets/test_playbook_webapp/aliases b/tests/integration/targets/test_playbook_webapp/aliases index 87ff2914..114b57f3 100644 --- a/tests/integration/targets/test_playbook_webapp/aliases +++ b/tests/integration/targets/test_playbook_webapp/aliases @@ -1,2 +1,3 @@ time=35m cloud/aws +disable \ No newline at end of file From 68588a10a8f8a97d4b3e504b48ac84d3e4b0ada2 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 10 Jan 2024 12:19:39 +0100 Subject: [PATCH 
30/51] update wait_timeout while creating load balancer --- roles/deploy_flask_app/tasks/setup_infra.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 6a9ad341..29751a80 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -71,3 +71,4 @@ - "{{ deploy_flask_app__public_subnet_id }}" scheme: internet-facing wait: true + wait_timeout: 360 From e7056b1231422cfc74a0b4885bb2a3cac80987a7 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 10 Jan 2024 13:03:41 +0100 Subject: [PATCH 31/51] temporary commit --- roles/deploy_flask_app/tasks/setup_infra.yaml | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 29751a80..30b8134c 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -55,20 +55,20 @@ deploy_flask_app_instances: "{{ deploy_flask_app_instances + [item.instance_id + ':' + item.private_ip_address] }}" with_items: "{{ deploy_flask_app_vms.instances }}" - - name: Create load balancer - amazon.aws.elb_classic_lb: - state: present - name: "{{ deploy_flask_app__resource_prefix }}-lb" - listeners: - - load_balancer_port: "{{ deploy_flask_app_listening_port }}" - instance_port: 5000 - protocol: HTTP - instance_protocol: HTTP - instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" - security_group_ids: - - "{{ deploy_flask_app__group_id }}" - subnets: - - "{{ deploy_flask_app__public_subnet_id }}" - scheme: internet-facing - wait: true - wait_timeout: 360 +# - name: Create load balancer +# amazon.aws.elb_classic_lb: +# state: present +# name: "{{ deploy_flask_app__resource_prefix }}-lb" +# listeners: +# - load_balancer_port: "{{ deploy_flask_app_listening_port }}" +# instance_port: 
5000 +# protocol: HTTP +# instance_protocol: HTTP +# instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" +# security_group_ids: +# - "{{ deploy_flask_app__group_id }}" +# subnets: +# - "{{ deploy_flask_app__public_subnet_id }}" +# scheme: internet-facing +# wait: true +# wait_timeout: 360 From 6dac13b2a459499408bee107abf2513faf7aa22d Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 17 Jan 2024 17:36:45 +0100 Subject: [PATCH 32/51] using roles --- roles/deploy_flask_app/README.md | 1 - .../deploy_flask_app/meta/argument_specs.yml | 4 --- roles/deploy_flask_app/tasks/main.yaml | 25 +++++++++---- roles/deploy_flask_app/tasks/setup_infra.yaml | 18 ---------- .../templates/deploy_app.yaml.j2 | 17 +++++++++ .../test_deploy_flask_app/defaults/main.yml | 1 - .../test_deploy_flask_app/meta/main.yml | 3 -- .../run_deploy_flask_app/defaults/main.yml | 2 ++ .../files/ec2-trust-policy.json | 0 .../meta/argument_specs.yml | 17 +++++++++ .../run_deploy_flask_app}/tasks/create.yaml | 16 ++++++--- .../run_deploy_flask_app}/tasks/delete.yaml | 0 .../run_deploy_flask_app/tasks/main.yaml | 3 ++ .../tasks/validate_create.yaml | 21 +++++++++++ .../tasks/validate_delete.yaml | 3 ++ .../run_deploy_flask_app}/vars/main.yaml | 1 + .../targets/test_deploy_flask_app/run.yaml | 12 +++++++ .../targets/test_deploy_flask_app/runme.sh | 16 +++++++++ .../test_deploy_flask_app/tasks/main.yaml | 35 ------------------- .../targets/test_playbook_webapp/aliases | 3 -- .../targets/test_playbook_webapp/runme.sh | 16 --------- .../test_playbook_webapp/test_webapp.yaml | 1 - 22 files changed, 122 insertions(+), 93 deletions(-) delete mode 100644 tests/integration/targets/test_deploy_flask_app/defaults/main.yml delete mode 100644 tests/integration/targets/test_deploy_flask_app/meta/main.yml create mode 100644 tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml rename tests/integration/targets/test_deploy_flask_app/{ => 
roles/run_deploy_flask_app}/files/ec2-trust-policy.json (100%) create mode 100644 tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml rename tests/integration/targets/test_deploy_flask_app/{ => roles/run_deploy_flask_app}/tasks/create.yaml (94%) rename tests/integration/targets/test_deploy_flask_app/{ => roles/run_deploy_flask_app}/tasks/delete.yaml (100%) create mode 100644 tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/main.yaml create mode 100644 tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml create mode 100644 tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_delete.yaml rename tests/integration/targets/test_deploy_flask_app/{ => roles/run_deploy_flask_app}/vars/main.yaml (95%) create mode 100644 tests/integration/targets/test_deploy_flask_app/run.yaml create mode 100755 tests/integration/targets/test_deploy_flask_app/runme.sh delete mode 100644 tests/integration/targets/test_deploy_flask_app/tasks/main.yaml delete mode 100644 tests/integration/targets/test_playbook_webapp/aliases delete mode 100755 tests/integration/targets/test_playbook_webapp/runme.sh delete mode 100644 tests/integration/targets/test_playbook_webapp/test_webapp.yaml diff --git a/roles/deploy_flask_app/README.md b/roles/deploy_flask_app/README.md index 6b3ca73a..1e1638e2 100644 --- a/roles/deploy_flask_app/README.md +++ b/roles/deploy_flask_app/README.md @@ -23,7 +23,6 @@ Role Variables ## variables to create new hosts and groups in inventory of in memory playbook. -* **deploy_flask_app_region** (str): Region where the app is to be deployed. * **deploy_flask_app_private_subnet_id** (str): Private subnet id of the bastion host * **deploy_flask_app_vpc_id** (str): vpc id for the host. * **deploy_flask_app_rds_info** (dict): A dict of information for the backend RDS. 
This dict has the output of amazon.aws.rds_instance_info mode. diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index b7d5dc9d..2ff18b6f 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -4,10 +4,6 @@ argument_specs: version_added: 2.0.0 short_description: Deploy flask app in AWS. options: - deploy_flask_app_region: - description: (Optional) Region where the app has to be deployed. - type: str - required: False deploy_flask_app_bastion_host_username: description: Username for the bastion host SSH user. type: str diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 6b74ac11..08a5a65e 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -1,11 +1,5 @@ --- - name: Deploy flask app. - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key | default(omit) }}" - aws_secret_key: "{{ aws_secret_key | default(omit) }}" - security_token: "{{ security_token | default(omit) }}" - region: "{{ region | default(aws_region) }}" vars: deploy_flask_app__resource_prefix: "{{ deploy_flask_app_vm_info.instances.0.public_dns_name | split('.') | first }}" deploy_flask_app__group_id: "{{ deploy_flask_app_vm_info.instances.0.security_groups[0].group_id }}" @@ -23,6 +17,25 @@ - name: Start application container into workers ansible.builtin.include_tasks: start_containers.yaml + - name: Create load balancer + amazon.aws.elb_classic_lb: + state: present + name: "{{ deploy_flask_app__resource_prefix }}-lb" + listeners: + - load_balancer_port: "{{ deploy_flask_app_listening_port }}" + instance_port: 5000 + protocol: HTTP + instance_protocol: HTTP + instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" + security_group_ids: + - "{{ deploy_flask_app__group_id }}" + subnets: + - "{{ deploy_flask_app__public_subnet_id }}" + scheme: 
internet-facing + wait: true + wait_timeout: 600 + register: deploy_flask_app_lb_result + - name: Display application URL ansible.builtin.debug: msg: "Application accessible at http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 30b8134c..7198c488 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -54,21 +54,3 @@ ansible.builtin.set_fact: deploy_flask_app_instances: "{{ deploy_flask_app_instances + [item.instance_id + ':' + item.private_ip_address] }}" with_items: "{{ deploy_flask_app_vms.instances }}" - -# - name: Create load balancer -# amazon.aws.elb_classic_lb: -# state: present -# name: "{{ deploy_flask_app__resource_prefix }}-lb" -# listeners: -# - load_balancer_port: "{{ deploy_flask_app_listening_port }}" -# instance_port: 5000 -# protocol: HTTP -# instance_protocol: HTTP -# instance_ids: "{{ deploy_flask_app_vms.instances | map(attribute='instance_id') | list }}" -# security_group_ids: -# - "{{ deploy_flask_app__group_id }}" -# subnets: -# - "{{ deploy_flask_app__public_subnet_id }}" -# scheme: internet-facing -# wait: true -# wait_timeout: 360 diff --git a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 index 357c13b2..828f15f2 100644 --- a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 +++ b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 @@ -26,6 +26,23 @@ register: container changed_when: false + - name: Initialize database tables + ansible.builtin.shell: + cmd: > + podman run --rm + -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" + -e FLASK_ENV="{{ deploy_flask_app_config.env }}" + -e DATABASE_HOST="{{ deploy_flask_app__rds_host }}" + -e DATABASE_INSTANCE="{{ deploy_flask_app__rds_dbname }}" + -e DATABASE_USER="{{ deploy_flask_app_rds_master_username }}" + -e 
DATABASE_PASSWORD="{{ deploy_flask_app_rds_master_password }}" + -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" + -e ADMIN_PASSWORD="{{ deploy_flask_app_config.admin_password }}" + -e WORKER_HOSTNAME="{{ deploy_flask_app_worker_hostname }}" + -e WORKERS_HOSTS="{{ deploy_flask_app_instances_list }}" + {{ deploy_flask_app_container_image }} flask {{ (deploy_flask_app_force_init | bool) | ternary('force-init-db', 'init-db') }} + run_once: true + - name: Run application instance ansible.builtin.shell: cmd: >- diff --git a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/defaults/main.yml deleted file mode 100644 index 0e6574a5..00000000 --- a/tests/integration/targets/test_deploy_flask_app/defaults/main.yml +++ /dev/null @@ -1 +0,0 @@ -aws_security_token: '{{ security_token | default(omit) }}' diff --git a/tests/integration/targets/test_deploy_flask_app/meta/main.yml b/tests/integration/targets/test_deploy_flask_app/meta/main.yml deleted file mode 100644 index 3d8b9c14..00000000 --- a/tests/integration/targets/test_deploy_flask_app/meta/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -dependencies: - - role: setup_rsa_keys diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml new file mode 100644 index 00000000..275fd10e --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml @@ -0,0 +1,2 @@ +--- +test_deploy_flask_app_operation: create diff --git a/tests/integration/targets/test_deploy_flask_app/files/ec2-trust-policy.json b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/files/ec2-trust-policy.json similarity index 100% rename from tests/integration/targets/test_deploy_flask_app/files/ec2-trust-policy.json rename to 
tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/files/ec2-trust-policy.json diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml new file mode 100644 index 00000000..a255cbf4 --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml @@ -0,0 +1,17 @@ +--- +argument_specs: + main: + version_added: 2.0.0 + short_description: Test role cloud.aws_ops.deploy_flask_app + options: + resource_prefix: + description: A resource prefix for AWS resources to be created. + type: str + required: true + run_deploy_flask_app_operation: + description: The operation to perform. + type: str + default: create + choices: + - create + - delete diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml similarity index 94% rename from tests/integration/targets/test_deploy_flask_app/tasks/create.yaml rename to tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml index e6096492..03969df1 100644 --- a/tests/integration/targets/test_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml @@ -1,8 +1,4 @@ --- -- name: Set 'region' variable - ansible.builtin.set_fact: - region: "{{ deploy_flask_app_region | default(aws_region) }}" - - name: Create resources playbook block: - name: Get image ID to create an instance @@ -146,10 +142,20 @@ - "{{ rds_sg.group_id }}" wait: false + # Create key pair to connect to the VM + - name: Create directory to generate keys in + ansible.builtin.file: + path: "{{ deploy_flask_app_bastion_rsa_key_dir }}" + state: directory + + - name: Generate RSA keys + community.crypto.openssh_keypair: + 
path: "{{ deploy_flask_app_bastion_rsa_key_dir }}/id_rsa" + - name: Create key pair to connect to the VM amazon.aws.ec2_key: name: "{{ deploy_flask_app_sshkey_pair_name }}" - key_material: "{{ lookup('file', setup_rsa_keys__public_key_file) }}" + key_material: "{{ lookup('file', deploy_flask_app_bastion_rsa_key_dir + '/id_rsa.pub') }}" - name: Ensure IAM instance role exists amazon.aws.iam_role: diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/delete.yaml similarity index 100% rename from tests/integration/targets/test_deploy_flask_app/tasks/delete.yaml rename to tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/delete.yaml diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/main.yaml new file mode 100644 index 00000000..1cc301d7 --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/main.yaml @@ -0,0 +1,3 @@ +--- +- name: "Run test_deploy_flask_app" + ansible.builtin.include_tasks: "validate_{{ run_deploy_flask_app_operation }}.yaml" diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml new file mode 100644 index 00000000..6c612492 --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml @@ -0,0 +1,21 @@ +--- +- name: Run operation create + ansible.builtin.include_tasks: "create.yaml" + +- name: Deploy resource from Bastion + ansible.builtin.include_role: + name: cloud.aws_ops.deploy_flask_app + vars: + deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" + deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" + 
deploy_flask_app_vm_info: "{{ vm_result }}" + deploy_flask_app_rds_info: "{{ rds_result }}" + deploy_flask_app_bastion_ssh_private_key: "{{ deploy_flask_app_bastion_rsa_key_dir }}/id_rsa" + +- name: Check that a page returns successfully + ansible.builtin.uri: + url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" + register: deploy_flask_app_check + until: "deploy_flask_app_check.status == 200" + retries: 5 + delay: 10 diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_delete.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_delete.yaml new file mode 100644 index 00000000..8e97433d --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_delete.yaml @@ -0,0 +1,3 @@ +--- +- name: Delete resources created for Flask application + ansible.builtin.include_tasks: "delete.yaml" diff --git a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/vars/main.yaml similarity index 95% rename from tests/integration/targets/test_deploy_flask_app/vars/main.yaml rename to tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/vars/main.yaml index 69c8a0db..64823dd2 100644 --- a/tests/integration/targets/test_deploy_flask_app/vars/main.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/vars/main.yaml @@ -45,3 +45,4 @@ deploy_flask_app_force_init: false deploy_flask_app_rds_master_password: L#5cH2mgy_ deploy_flask_app_rds_master_username: ansible +deploy_flask_app_bastion_rsa_key_dir: "~/.ssh-{{ resource_prefix }}" diff --git a/tests/integration/targets/test_deploy_flask_app/run.yaml b/tests/integration/targets/test_deploy_flask_app/run.yaml new file mode 100644 index 00000000..54dec912 --- /dev/null +++ 
b/tests/integration/targets/test_deploy_flask_app/run.yaml @@ -0,0 +1,12 @@ +- hosts: localhost + gather_facts: false + + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key }}" + aws_secret_key: "{{ aws_secret_key }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ aws_region }}" + + roles: + - role: run_deploy_flask_app diff --git a/tests/integration/targets/test_deploy_flask_app/runme.sh b/tests/integration/targets/test_deploy_flask_app/runme.sh new file mode 100755 index 00000000..9199df15 --- /dev/null +++ b/tests/integration/targets/test_deploy_flask_app/runme.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -eux + +function cleanup() { + ansible-playbook run.yaml -e "run_deploy_flask_app_operation=delete" "$@" + exit 1 +} + +trap 'cleanup "${@}"' ERR + +# Create web application +ansible-playbook run.yaml "$@" + +# Delete web application +ansible-playbook run.yaml -e "run_deploy_flask_app_operation=delete" "$@" \ No newline at end of file diff --git a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml b/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml deleted file mode 100644 index bc6f35b9..00000000 --- a/tests/integration/targets/test_deploy_flask_app/tasks/main.yaml +++ /dev/null @@ -1,35 +0,0 @@ ---- -- name: "Run deploy_flask_app integration tests" - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key }}" - aws_secret_key: "{{ aws_secret_key }}" - security_token: "{{ aws_security_token }}" - region: "{{ aws_region }}" - - block: - - name: Run operation create - ansible.builtin.include_tasks: "create.yaml" - - - name: Deploy resource from Bastion - ansible.builtin.include_role: - name: cloud.aws_ops.deploy_flask_app - vars: - deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" - deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" - deploy_flask_app_vm_info: "{{ vm_result }}" - deploy_flask_app_rds_info: "{{ rds_result }}" - deploy_flask_app_bastion_ssh_private_key: 
"{{ setup_rsa_keys__private_key_file }}" - - - name: Check that a page returns successfully - ansible.builtin.uri: - url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" - register: deploy_flask_app_check - until: "deploy_flask_app_check.status == 200" - retries: 5 - delay: 10 - - always: - # Cleanup after ourselves - - name: Cleanup - ansible.builtin.include_tasks: "delete.yaml" diff --git a/tests/integration/targets/test_playbook_webapp/aliases b/tests/integration/targets/test_playbook_webapp/aliases deleted file mode 100644 index 114b57f3..00000000 --- a/tests/integration/targets/test_playbook_webapp/aliases +++ /dev/null @@ -1,3 +0,0 @@ -time=35m -cloud/aws -disable \ No newline at end of file diff --git a/tests/integration/targets/test_playbook_webapp/runme.sh b/tests/integration/targets/test_playbook_webapp/runme.sh deleted file mode 100755 index fbb29492..00000000 --- a/tests/integration/targets/test_playbook_webapp/runme.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -set -eux - -function cleanup() { - ansible-playbook test_webapp.yaml -e "operation=delete" "$@" - exit 1 -} - -trap 'cleanup "${@}"' ERR - -# Create web application -ansible-playbook test_webapp.yaml "$@" - -# Delete web application -ansible-playbook test_webapp.yaml -e "operation=delete" "$@" \ No newline at end of file diff --git a/tests/integration/targets/test_playbook_webapp/test_webapp.yaml b/tests/integration/targets/test_playbook_webapp/test_webapp.yaml deleted file mode 100644 index 09ef0ea3..00000000 --- a/tests/integration/targets/test_playbook_webapp/test_webapp.yaml +++ /dev/null @@ -1 +0,0 @@ -- import_playbook: cloud.aws_ops.webapp.webapp From b38b83ec8f1e56039bff1a22fbb72c764295cdda Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 17 Jan 2024 17:49:47 +0100 Subject: [PATCH 33/51] add missing paremeters --- .../run_deploy_flask_app/defaults/main.yml | 2 +- .../meta/argument_specs.yml | 17 ----------------- 
.../targets/test_deploy_flask_app/runme.sh | 2 +- 3 files changed, 2 insertions(+), 19 deletions(-) delete mode 100644 tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml index 275fd10e..d0dec9fd 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/defaults/main.yml @@ -1,2 +1,2 @@ --- -test_deploy_flask_app_operation: create +run_deploy_flask_app_operation: create diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml deleted file mode 100644 index a255cbf4..00000000 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/meta/argument_specs.yml +++ /dev/null @@ -1,17 +0,0 @@ ---- -argument_specs: - main: - version_added: 2.0.0 - short_description: Test role cloud.aws_ops.deploy_flask_app - options: - resource_prefix: - description: A resource prefix for AWS resources to be created. - type: str - required: true - run_deploy_flask_app_operation: - description: The operation to perform. 
- type: str - default: create - choices: - - create - - delete diff --git a/tests/integration/targets/test_deploy_flask_app/runme.sh b/tests/integration/targets/test_deploy_flask_app/runme.sh index 9199df15..bb3f1767 100755 --- a/tests/integration/targets/test_deploy_flask_app/runme.sh +++ b/tests/integration/targets/test_deploy_flask_app/runme.sh @@ -10,7 +10,7 @@ function cleanup() { trap 'cleanup "${@}"' ERR # Create web application -ansible-playbook run.yaml "$@" +ansible-playbook run.yaml -e "run_deploy_flask_app_operation=create" "$@" # Delete web application ansible-playbook run.yaml -e "run_deploy_flask_app_operation=delete" "$@" \ No newline at end of file From bd0a90778f52d5bb5d9d8cb4f11983a4fde46df7 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 18 Jan 2024 15:56:59 +0100 Subject: [PATCH 34/51] Update RDS result --- playbooks/webapp/webapp.yaml | 7 ------- .../roles/run_deploy_flask_app/tasks/create.yaml | 9 +-------- 2 files changed, 1 insertion(+), 15 deletions(-) diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index d27348f5..99101861 100644 --- a/playbooks/webapp/webapp.yaml +++ b/playbooks/webapp/webapp.yaml @@ -14,13 +14,6 @@ - name: Run operation create/delete ansible.builtin.import_tasks: tasks/{{ operation }}.yaml -- name: Deploy Flask App - hosts: localhost - gather_facts: false - vars_files: - - vars/main.yaml - - tasks: - name: Deploy app when: operation == "create" ansible.builtin.include_role: diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml index 03969df1..575b6ce7 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml @@ -141,6 +141,7 @@ vpc_security_group_ids: - "{{ rds_sg.group_id }}" wait: false + register: 
rds_result # Create key pair to connect to the VM - name: Create directory to generate keys in @@ -211,11 +212,3 @@ subnet: private route: nat-gateway state: present - - - name: Wait for the RDS instance to be available - amazon.aws.rds_instance_info: - db_instance_identifier: "{{ rds_identifier }}" - retries: 60 - delay: 5 - until: rds_result.instances.0.db_instance_status == "available", - register: rds_result From b760acc6f0a98358b16d21312adf45c443d7ebe5 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 18 Jan 2024 16:18:07 +0100 Subject: [PATCH 35/51] Revert update of tests/integration/targets/setup_rsa_keys --- .../targets/setup_rsa_keys/handlers/main.yml | 4 +-- .../targets/setup_rsa_keys/tasks/main.yml | 36 +++++++------------ 2 files changed, 15 insertions(+), 25 deletions(-) diff --git a/tests/integration/targets/setup_rsa_keys/handlers/main.yml b/tests/integration/targets/setup_rsa_keys/handlers/main.yml index 04fcf9aa..03ea4269 100644 --- a/tests/integration/targets/setup_rsa_keys/handlers/main.yml +++ b/tests/integration/targets/setup_rsa_keys/handlers/main.yml @@ -1,6 +1,6 @@ --- -- name: Delete RSA key directory +- name: Delete temporary RSA key directory ansible.builtin.file: state: absent - path: "{{ setup_rsa_keys__path }}" + path: "{{ setup_rsa_keys__tmpdir }}" ignore_errors: true diff --git a/tests/integration/targets/setup_rsa_keys/tasks/main.yml b/tests/integration/targets/setup_rsa_keys/tasks/main.yml index 8ef0edfe..1d0c94b8 100644 --- a/tests/integration/targets/setup_rsa_keys/tasks/main.yml +++ b/tests/integration/targets/setup_rsa_keys/tasks/main.yml @@ -1,26 +1,16 @@ --- -- name: Define path to private and public keys - ansible.builtin.set_fact: - setup_rsa_keys__public_key_file: "{{ setup_rsa_keys__path }}/id_rsa.pub" - setup_rsa_keys__private_key_file: "{{ setup_rsa_keys__path }}/id_rsa" - -- name: Check if ssh directory exists - ansible.builtin.stat: - path: "{{ item }}" - register: stats - with_items: - - "{{ 
setup_rsa_keys__public_key_file }}" - - "{{ setup_rsa_keys__private_key_file }}" +- name: Create temporary directory to generate keys + ansible.builtin.tempfile: + state: directory + suffix: ssh + register: setup_rsa_keys__tmpdir + notify: 'Delete temporary RSA key directory' -- name: Generate RSA keys file - when: stats.results | selectattr('stat.exists', 'equalto', false) | list | length > 0 - block: - - name: Create directory to generate keys in - ansible.builtin.file: - path: "{{ setup_rsa_keys__path }}" - state: directory - notify: 'Delete RSA key directory' +- name: Generate RSA keys + community.crypto.openssh_keypair: + path: "{{ setup_rsa_keys__tmpdir.path }}/id_rsa" - - name: Generate RSA keys - community.crypto.openssh_keypair: - path: "{{ setup_rsa_keys__path }}/id_rsa" +- name: Define path to private and public keys + ansible.builtin.set_fact: + setup_rsa_keys__public_key_file: "{{ setup_rsa_keys__tmpdir.path }}/id_rsa.pub" + setup_rsa_keys__private_key_file: "{{ setup_rsa_keys__tmpdir.path }}/id_rsa" From b32f8fc7e948a9a1dd030a21713e1e6b70af29c6 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 18 Jan 2024 17:19:29 +0100 Subject: [PATCH 36/51] Support storage of key pair into S3 bucket - Needed for running on AWX (container) --- playbooks/webapp/tasks/create.yaml | 17 +------ playbooks/webapp/tasks/delete.yaml | 5 ++ playbooks/webapp/tasks/manage_keypair.yaml | 51 +++++++++++++++++++ playbooks/webapp/vars/main.yaml | 3 ++ .../run_deploy_flask_app/tasks/create.yaml | 6 ++- .../tasks/validate_create.yaml | 5 +- 6 files changed, 69 insertions(+), 18 deletions(-) create mode 100644 playbooks/webapp/tasks/manage_keypair.yaml diff --git a/playbooks/webapp/tasks/create.yaml b/playbooks/webapp/tasks/create.yaml index 76d6e9e3..f4858329 100644 --- a/playbooks/webapp/tasks/create.yaml +++ b/playbooks/webapp/tasks/create.yaml @@ -202,21 +202,8 @@ db_instance_identifier: "{{ rds_identifier }}" register: rds_result - - name: Set 'sshkey_file' variable - 
ansible.builtin.set_fact: - sshkey_file: ~/private-key-{{ deploy_flask_app_sshkey_pair_name }}-{{ region | default(aws_region) }} - - - name: Create key pair to connect to the VM - amazon.aws.ec2_key: - name: "{{ deploy_flask_app_sshkey_pair_name }}" - register: rsa_key - - - name: Save private key into file - ansible.builtin.copy: - content: "{{ rsa_key.key.private_key }}" - dest: "{{ sshkey_file }}" - mode: 0400 - when: rsa_key is changed + - name: Create key pair to connect to the virtual machine + ansible.builtin.include_tasks: manage_keypair.yaml - name: Ensure IAM instance role exists amazon.aws.iam_role: diff --git a/playbooks/webapp/tasks/delete.yaml b/playbooks/webapp/tasks/delete.yaml index 929dace8..e70d0e10 100644 --- a/playbooks/webapp/tasks/delete.yaml +++ b/playbooks/webapp/tasks/delete.yaml @@ -8,6 +8,11 @@ region: "{{ region | default(aws_region) }}" block: + - name: Delete S3 bucket + amazon.aws.s3_bucket: + name: "{{ bucket_name }}" + state: absent + force: true - name: Get vpc information amazon.aws.ec2_vpc_net_info: diff --git a/playbooks/webapp/tasks/manage_keypair.yaml b/playbooks/webapp/tasks/manage_keypair.yaml new file mode 100644 index 00000000..c07517a7 --- /dev/null +++ b/playbooks/webapp/tasks/manage_keypair.yaml @@ -0,0 +1,51 @@ +--- +- name: Ensure S3 bucket exists + amazon.aws.s3_bucket: + name: "{{ bucket_name }}" + versioning: false + state: present + +- name: Check if object exists into bucket + amazon.aws.s3_object_info: + bucket_name: "{{ bucket_name }}" + object_name: "id_rsa" + register: existing_obj + ignore_errors: true + +- name: Create RSA key pair and Save Private key into S3 bucket + when: existing_obj is failed + block: + - name: Create key pair to connect to the VM + amazon.aws.ec2_key: + name: "{{ deploy_flask_app_sshkey_pair_name }}" + register: keypair + + - name: Put object into bucket + amazon.aws.s3_object: + bucket: "{{ bucket_name }}" + mode: put + object: "id_rsa" + content: "{{ keypair.key.private_key }}" + 
+- name: Download object as string + amazon.aws.s3_object: + bucket: "{{ bucket_name }}" + mode: getstr + object: "id_rsa" + register: downloadObj + +# Download RSA Key +- name: Create temporary file for Private RSA key + ansible.builtin.tempfile: + suffix: id_rsa + register: private_key + +- name: Save private key into file + ansible.builtin.copy: + content: "{{ downloadObj.contents }}" + dest: "{{ private_key.path }}" + mode: 0400 + +- name: Set variable for SSH private key file + ansible.builtin.set_fact: + sshkey_file: "{{ private_key.path }}" diff --git a/playbooks/webapp/vars/main.yaml b/playbooks/webapp/vars/main.yaml index d4a7fbc9..e62cfb73 100644 --- a/playbooks/webapp/vars/main.yaml +++ b/playbooks/webapp/vars/main.yaml @@ -60,3 +60,6 @@ rds_replica_cluster_instance_name: "{{ resource_prefix }}-replica-instance" # vars for route53 records route53_subdomain: "flaskapp" + +# A bucket to save RSA key into +bucket_name: "bucket-rsa-{{ resource_prefix }}" diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml index 575b6ce7..c25431f3 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml @@ -141,7 +141,6 @@ vpc_security_group_ids: - "{{ rds_sg.group_id }}" wait: false - register: rds_result # Create key pair to connect to the VM - name: Create directory to generate keys in @@ -212,3 +211,8 @@ subnet: private route: nat-gateway state: present + + - name: Get RDS instance info + amazon.aws.rds_instance_info: + db_instance_identifier: "{{ rds_identifier }}" + register: rds_result diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml 
b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml index 6c612492..24cc73f4 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml @@ -17,5 +17,6 @@ url: "http://{{ deploy_flask_app_lb_result.elb.dns_name }}:{{ deploy_flask_app_listening_port }}" register: deploy_flask_app_check until: "deploy_flask_app_check.status == 200" - retries: 5 - delay: 10 + retries: 200 + delay: 3 + ignore_errors: true From b1e01ade32fa3a8ca089270af43fdb5e47b319f2 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 18 Jan 2024 17:43:49 +0100 Subject: [PATCH 37/51] Fix ansible-lint --- playbooks/webapp/webapp.yaml | 7 +++++++ roles/deploy_flask_app/meta/argument_specs.yml | 4 ++++ roles/deploy_flask_app/tasks/main.yaml | 1 + roles/deploy_flask_app/tasks/setup_infra.yaml | 3 +++ 4 files changed, 15 insertions(+) diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index 99101861..2e639f29 100644 --- a/playbooks/webapp/webapp.yaml +++ b/playbooks/webapp/webapp.yaml @@ -24,3 +24,10 @@ deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" deploy_flask_app_rds_info: "{{ rds_result }}" + deploy_flask_app_region: "{{ aws_region }}" + + - name: Delete temporary ssh private key file + ansible.builtin.file: + state: absent + path: "{{ sshkey_file }}" + ignore_errors: true diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index 2ff18b6f..b7d5dc9d 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -4,6 +4,10 @@ argument_specs: version_added: 2.0.0 short_description: Deploy flask app in AWS. options: + deploy_flask_app_region: + description: (Optional) Region where the app has to be deployed. 
+ type: str + required: False deploy_flask_app_bastion_host_username: description: Username for the bastion host SSH user. type: str diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 08a5a65e..a9a63388 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -34,6 +34,7 @@ scheme: internet-facing wait: true wait_timeout: 600 + region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_lb_result - name: Display application URL diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 7198c488..f2da381a 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -12,6 +12,7 @@ key-name: "{{ deploy_flask_app_sshkey_pair_name }}" image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running + region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_vms - name: Compute number of instances to create/delete @@ -35,6 +36,7 @@ wait: true count: "{{ deploy_flask_app_expected_instances }}" state: started + region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_workers when: deploy_flask_app_expected_instances | int > 0 @@ -45,6 +47,7 @@ key-name: "{{ deploy_flask_app_sshkey_pair_name }}" image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running + region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_vms - name: Create list of instances (join) From dd2eec056071f5e18c8097aaac644d7cf06bc6de Mon Sep 17 00:00:00 2001 From: abikouo Date: Fri, 19 Jan 2024 07:57:29 +0100 Subject: [PATCH 38/51] Conditionnal delete for sshkey_file --- playbooks/webapp/webapp.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index 2e639f29..323b34da 100644 --- a/playbooks/webapp/webapp.yaml +++ 
b/playbooks/webapp/webapp.yaml @@ -30,4 +30,5 @@ ansible.builtin.file: state: absent path: "{{ sshkey_file }}" + when: sshkey_file is defined ignore_errors: true From 985b23cae4d1621e16b9431465089784fcc28358 Mon Sep 17 00:00:00 2001 From: abikouo Date: Fri, 19 Jan 2024 08:50:03 +0100 Subject: [PATCH 39/51] Remove confusing variable deploy_flask_app_region --- playbooks/webapp/migrate_webapp.yaml | 27 ++++++++++++------- playbooks/webapp/webapp.yaml | 27 ++++++++++++------- playbooks/webapp/webapp_ha_aurora.yaml | 9 +++++-- .../deploy_flask_app/meta/argument_specs.yml | 4 --- roles/deploy_flask_app/tasks/main.yaml | 1 - roles/deploy_flask_app/tasks/setup_infra.yaml | 3 --- 6 files changed, 41 insertions(+), 30 deletions(-) diff --git a/playbooks/webapp/migrate_webapp.yaml b/playbooks/webapp/migrate_webapp.yaml index 4aad09f8..f26e134a 100644 --- a/playbooks/webapp/migrate_webapp.yaml +++ b/playbooks/webapp/migrate_webapp.yaml @@ -53,17 +53,24 @@ rds_snapshot_arn: "{{ result.db_snapshot_arn }}" region: "{{ dest_region }}" - - name: Deploy app + - name: Create workers and deploy application when: operation == "create" - ansible.builtin.import_role: - name: cloud.aws_ops.deploy_flask_app - vars: - deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" - deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" - deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" - deploy_flask_app_vm_info: "{{ vm_result }}" - deploy_flask_app_rds_info: "{{ rds_result }}" - deploy_flask_app_region: "{{ dest_region }}" + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key | default(omit) }}" + aws_secret_key: "{{ aws_secret_key | default(omit) }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ dest_region }}" + block: + - name: Deploy app + ansible.builtin.import_role: + name: cloud.aws_ops.deploy_flask_app + vars: + deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" + deploy_flask_app_private_subnet_id: "{{ 
private_subnet.subnet.id }}" + deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" + deploy_flask_app_vm_info: "{{ vm_result }}" + deploy_flask_app_rds_info: "{{ rds_result }}" - name: Delete RDS snapshots from different regions amazon.aws.rds_instance_snapshot: diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index 323b34da..511debd7 100644 --- a/playbooks/webapp/webapp.yaml +++ b/playbooks/webapp/webapp.yaml @@ -14,17 +14,24 @@ - name: Run operation create/delete ansible.builtin.import_tasks: tasks/{{ operation }}.yaml - - name: Deploy app + - name: Create workers and deploy application when: operation == "create" - ansible.builtin.include_role: - name: cloud.aws_ops.deploy_flask_app - vars: - deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" - deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" - deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" - deploy_flask_app_vm_info: "{{ vm_result }}" - deploy_flask_app_rds_info: "{{ rds_result }}" - deploy_flask_app_region: "{{ aws_region }}" + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key | default(omit) }}" + aws_secret_key: "{{ aws_secret_key | default(omit) }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ region | default(aws_region) }}" + block: + - name: Deploy app + ansible.builtin.include_role: + name: cloud.aws_ops.deploy_flask_app + vars: + deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" + deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" + deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" + deploy_flask_app_vm_info: "{{ vm_result }}" + deploy_flask_app_rds_info: "{{ rds_result }}" - name: Delete temporary ssh private key file ansible.builtin.file: diff --git a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index fb5c6170..8daa3540 100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -5,6 +5,13 @@ vars_files: - 
vars/main.yaml + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key | default(omit) }}" + aws_secret_key: "{{ aws_secret_key | default(omit) }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ region | default(aws_region) }}" + tasks: - name: Create resources and Deploy App when: operation == "create" @@ -62,7 +69,6 @@ deploy_flask_app_vpc_id: "{{ primary_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ primary_vm_result }}" deploy_flask_app_rds_info: "{{ primary_instance_info_result }}" - deploy_flask_app_region: "{{ rds_primary_cluster_region }}" - name: Get load balancer name from the primary region ansible.builtin.set_fact: @@ -102,7 +108,6 @@ deploy_flask_app_vpc_id: "{{ replica_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ replica_vm_result }}" deploy_flask_app_rds_info: "{{ replica_instance_info_result }}" - deploy_flask_app_region: "{{ rds_replica_cluster_region }}" - name: Get load balancer name from the replica region ansible.builtin.set_fact: diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index b7d5dc9d..2ff18b6f 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -4,10 +4,6 @@ argument_specs: version_added: 2.0.0 short_description: Deploy flask app in AWS. options: - deploy_flask_app_region: - description: (Optional) Region where the app has to be deployed. - type: str - required: False deploy_flask_app_bastion_host_username: description: Username for the bastion host SSH user. 
type: str diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index a9a63388..08a5a65e 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -34,7 +34,6 @@ scheme: internet-facing wait: true wait_timeout: 600 - region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_lb_result - name: Display application URL diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index f2da381a..7198c488 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -12,7 +12,6 @@ key-name: "{{ deploy_flask_app_sshkey_pair_name }}" image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running - region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_vms - name: Compute number of instances to create/delete @@ -36,7 +35,6 @@ wait: true count: "{{ deploy_flask_app_expected_instances }}" state: started - region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_workers when: deploy_flask_app_expected_instances | int > 0 @@ -47,7 +45,6 @@ key-name: "{{ deploy_flask_app_sshkey_pair_name }}" image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running - region: "{{ deploy_flask_app_region | default(omit) }}" register: deploy_flask_app_vms - name: Create list of instances (join) From 3f13bc50faebe7dcb768891736778ab94b41d60b Mon Sep 17 00:00:00 2001 From: abikouo Date: Fri, 19 Jan 2024 09:00:17 +0100 Subject: [PATCH 40/51] fix linters --- playbooks/webapp/tasks/manage_keypair.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/playbooks/webapp/tasks/manage_keypair.yaml b/playbooks/webapp/tasks/manage_keypair.yaml index c07517a7..c99595a7 100644 --- a/playbooks/webapp/tasks/manage_keypair.yaml +++ b/playbooks/webapp/tasks/manage_keypair.yaml @@ -32,7 +32,7 @@ bucket: "{{ 
bucket_name }}" mode: getstr object: "id_rsa" - register: downloadObj + register: download_obj # Download RSA Key - name: Create temporary file for Private RSA key @@ -42,7 +42,7 @@ - name: Save private key into file ansible.builtin.copy: - content: "{{ downloadObj.contents }}" + content: "{{ download_obj.contents }}" dest: "{{ private_key.path }}" mode: 0400 From eba57a08bb584596f386e0110154d4f823cc0f5e Mon Sep 17 00:00:00 2001 From: abikouo Date: Fri, 26 Jan 2024 13:38:32 +0100 Subject: [PATCH 41/51] minor update for code review --- playbooks/webapp/migrate_webapp.yaml | 2 +- playbooks/webapp/tasks/delete.yaml | 6 +++++- playbooks/webapp/tasks/manage_keypair.yaml | 2 +- playbooks/webapp/webapp.yaml | 6 +++--- playbooks/webapp/webapp_ha_aurora.yaml | 4 ++-- roles/deploy_flask_app/templates/bastion_ssh_config.j2 | 2 +- roles/deploy_flask_app/templates/deploy_app.yaml.j2 | 2 +- roles/deploy_flask_app/templates/local_ssh_config.j2 | 2 +- roles/deploy_flask_app/templates/workers_inventory.yaml.j2 | 2 +- 9 files changed, 16 insertions(+), 12 deletions(-) diff --git a/playbooks/webapp/migrate_webapp.yaml b/playbooks/webapp/migrate_webapp.yaml index f26e134a..673ec395 100644 --- a/playbooks/webapp/migrate_webapp.yaml +++ b/playbooks/webapp/migrate_webapp.yaml @@ -66,7 +66,7 @@ ansible.builtin.import_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" + deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" diff --git a/playbooks/webapp/tasks/delete.yaml b/playbooks/webapp/tasks/delete.yaml index e70d0e10..9d5efd0f 100644 --- a/playbooks/webapp/tasks/delete.yaml +++ b/playbooks/webapp/tasks/delete.yaml @@ -32,12 +32,16 @@ community.aws.elb_classic_lb_info: register: load_balancers + - name: Set fact for list of load balancers to delete + 
ansible.builtin.set_fact: + load_balancers_to_delete: "{{ load_balancers.elbs | selectattr('vpc_id', 'equalto', vpc_id) | map(attribute='load_balancer_name') | list }}" + - name: Delete load balancer(s) amazon.aws.elb_classic_lb: name: "{{ item }}" wait: true state: absent - with_items: "{{ load_balancers.elbs | selectattr('vpc_id', 'equalto', vpc_id) | map(attribute='load_balancer_name') | list }}" + with_items: "{{ load_balancers_to_delete }}" # Delete EC2 instances - name: Get EC2 instance info diff --git a/playbooks/webapp/tasks/manage_keypair.yaml b/playbooks/webapp/tasks/manage_keypair.yaml index c99595a7..74404bb2 100644 --- a/playbooks/webapp/tasks/manage_keypair.yaml +++ b/playbooks/webapp/tasks/manage_keypair.yaml @@ -48,4 +48,4 @@ - name: Set variable for SSH private key file ansible.builtin.set_fact: - sshkey_file: "{{ private_key.path }}" + ssh_key_file_path: "{{ private_key.path }}" diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index 511debd7..3a446ccf 100644 --- a/playbooks/webapp/webapp.yaml +++ b/playbooks/webapp/webapp.yaml @@ -27,7 +27,7 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" + deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" @@ -36,6 +36,6 @@ - name: Delete temporary ssh private key file ansible.builtin.file: state: absent - path: "{{ sshkey_file }}" - when: sshkey_file is defined + path: "{{ ssh_key_file_path }}" + when: ssh_key_file_path is defined ignore_errors: true diff --git a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index 8daa3540..07134b78 100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -64,7 +64,7 @@ ansible.builtin.include_role: name: 
cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" + deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ primary_private_subnet.subnets[0].id }}" deploy_flask_app_vpc_id: "{{ primary_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ primary_vm_result }}" @@ -103,7 +103,7 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ sshkey_file }}" + deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ replica_private_subnet.subnets[0].id }}" deploy_flask_app_vpc_id: "{{ replica_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ replica_vm_result }}" diff --git a/roles/deploy_flask_app/templates/bastion_ssh_config.j2 b/roles/deploy_flask_app/templates/bastion_ssh_config.j2 index ec4c2e10..97ddc59b 100644 --- a/roles/deploy_flask_app/templates/bastion_ssh_config.j2 +++ b/roles/deploy_flask_app/templates/bastion_ssh_config.j2 @@ -5,4 +5,4 @@ Host {{ item.instance_id }} IdentityFile {{ deploy_flask_app_workers_ssh_private_key }} StrictHostKeyChecking no UserKnownHostsFile /dev/null -{% endfor %} \ No newline at end of file +{% endfor %} diff --git a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 index 828f15f2..af65d638 100644 --- a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 +++ b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 @@ -63,4 +63,4 @@ -d {{ deploy_flask_app_container_image }} when: - container.stdout == "" - changed_when: true \ No newline at end of file + changed_when: true diff --git a/roles/deploy_flask_app/templates/local_ssh_config.j2 b/roles/deploy_flask_app/templates/local_ssh_config.j2 index 6395d992..e039f2ff 100644 --- a/roles/deploy_flask_app/templates/local_ssh_config.j2 +++ b/roles/deploy_flask_app/templates/local_ssh_config.j2 @@ -3,4 +3,4 @@ Host bastion 
User {{ deploy_flask_app_bastion_host_username }} IdentityFile {{ deploy_flask_app_bastion_ssh_private_key }} StrictHostKeyChecking no - UserKnownHostsFile /dev/null \ No newline at end of file + UserKnownHostsFile /dev/null diff --git a/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 b/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 index 40219ae8..2e4b7331 100644 --- a/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 +++ b/roles/deploy_flask_app/templates/workers_inventory.yaml.j2 @@ -3,4 +3,4 @@ all: {% for item in deploy_flask_app_vms.instances %} {{ item.instance_id }}: ansible_python_interpreter: auto -{% endfor %} \ No newline at end of file +{% endfor %} From 49187b8ff2ccd594764e9594988d740d027e3c66 Mon Sep 17 00:00:00 2001 From: Bikouo Aubin <79859644+abikouo@users.noreply.github.com> Date: Wed, 31 Jan 2024 08:48:01 +0100 Subject: [PATCH 42/51] Update 20231219-deploy_flask_app-update-arguments-spec.yml --- .../20231219-deploy_flask_app-update-arguments-spec.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml index a8f8cb62..a629aa91 100644 --- a/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml +++ b/changelogs/fragments/20231219-deploy_flask_app-update-arguments-spec.yml @@ -2,7 +2,7 @@ breaking_changes: - >- roles/deploy_flask_app - Add parameter ``deploy_flask_app_bastion_ssh_private_key`` to define - the path to the ssh private key file to use to connect to the bastion host (https://github.com/redhat-cop/cloud.aws_ops/issues/103). + the path to the ssh private key file to use to connect to the bastion host (https://github.com/redhat-cop/cloud.aws_ops/issues/109). 
- >- roles/deploy_flask_app - The following parameters no longer required have been removed ``deploy_flask_app_bastion_host_required_packages``, ``deploy_flask_app_local_registry_port``, From 0c620fc76d7060d7b18277ba37387e2095066f5f Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 31 Jan 2024 15:40:48 +0100 Subject: [PATCH 43/51] minor code review updates --- playbooks/webapp/migrate_webapp.yaml | 1 - playbooks/webapp/tasks/create.yaml | 17 ++++++- playbooks/webapp/tasks/manage_keypair.yaml | 51 ------------------- playbooks/webapp/webapp.yaml | 13 ++--- playbooks/webapp/webapp_ha_aurora.yaml | 2 - roles/deploy_flask_app/README.md | 11 ++-- .../deploy_flask_app/meta/argument_specs.yml | 21 ++++---- roles/deploy_flask_app/tasks/main.yaml | 12 +---- roles/deploy_flask_app/tasks/setup_infra.yaml | 47 +++++++++++++++-- .../tasks/start_containers.yaml | 38 -------------- .../templates/deploy_app.yaml.j2 | 8 +-- .../templates/local_ssh_config.j2 | 6 --- .../files/ec2-trust-policy.json | 2 +- .../tasks/validate_create.yaml | 7 +-- 14 files changed, 84 insertions(+), 152 deletions(-) delete mode 100644 playbooks/webapp/tasks/manage_keypair.yaml delete mode 100644 roles/deploy_flask_app/templates/local_ssh_config.j2 diff --git a/playbooks/webapp/migrate_webapp.yaml b/playbooks/webapp/migrate_webapp.yaml index 673ec395..461dd6a7 100644 --- a/playbooks/webapp/migrate_webapp.yaml +++ b/playbooks/webapp/migrate_webapp.yaml @@ -66,7 +66,6 @@ ansible.builtin.import_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_vm_info: "{{ vm_result }}" diff --git a/playbooks/webapp/tasks/create.yaml b/playbooks/webapp/tasks/create.yaml index f4858329..865d978a 100644 --- a/playbooks/webapp/tasks/create.yaml +++ b/playbooks/webapp/tasks/create.yaml @@ -202,8 +202,21 @@ db_instance_identifier: "{{ 
rds_identifier }}" register: rds_result - - name: Create key pair to connect to the virtual machine - ansible.builtin.include_tasks: manage_keypair.yaml + - name: Set variable for SSH private key file path + ansible.builtin.set_fact: + deploy_flask_app_bastion_ssh_private_key_path: "~/.{{ resource_prefix }}_id_rsa" + + - name: Create key pair to connect to the workers + amazon.aws.ec2_key: + name: "{{ deploy_flask_app_sshkey_pair_name }}" + register: keypair_result + + - name: Save private key into a file + ansible.builtin.copy: + content: "{{ keypair_result.key.private_key }}" + dest: "{{ deploy_flask_app_bastion_ssh_private_key_path }}" + mode: 0600 + when: keypair_result is changed - name: Ensure IAM instance role exists amazon.aws.iam_role: diff --git a/playbooks/webapp/tasks/manage_keypair.yaml b/playbooks/webapp/tasks/manage_keypair.yaml deleted file mode 100644 index 74404bb2..00000000 --- a/playbooks/webapp/tasks/manage_keypair.yaml +++ /dev/null @@ -1,51 +0,0 @@ ---- -- name: Ensure S3 bucket exists - amazon.aws.s3_bucket: - name: "{{ bucket_name }}" - versioning: false - state: present - -- name: Check if object exists into bucket - amazon.aws.s3_object_info: - bucket_name: "{{ bucket_name }}" - object_name: "id_rsa" - register: existing_obj - ignore_errors: true - -- name: Create RSA key pair and Save Private key into S3 bucket - when: existing_obj is failed - block: - - name: Create key pair to connect to the VM - amazon.aws.ec2_key: - name: "{{ deploy_flask_app_sshkey_pair_name }}" - register: keypair - - - name: Put object into bucket - amazon.aws.s3_object: - bucket: "{{ bucket_name }}" - mode: put - object: "id_rsa" - content: "{{ keypair.key.private_key }}" - -- name: Download object as string - amazon.aws.s3_object: - bucket: "{{ bucket_name }}" - mode: getstr - object: "id_rsa" - register: download_obj - -# Download RSA Key -- name: Create temporary file for Private RSA key - ansible.builtin.tempfile: - suffix: id_rsa - register: private_key - -- 
name: Save private key into file - ansible.builtin.copy: - content: "{{ download_obj.contents }}" - dest: "{{ private_key.path }}" - mode: 0400 - -- name: Set variable for SSH private key file - ansible.builtin.set_fact: - ssh_key_file_path: "{{ private_key.path }}" diff --git a/playbooks/webapp/webapp.yaml b/playbooks/webapp/webapp.yaml index 3a446ccf..265a7162 100644 --- a/playbooks/webapp/webapp.yaml +++ b/playbooks/webapp/webapp.yaml @@ -27,15 +27,8 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" - deploy_flask_app_vm_info: "{{ vm_result }}" - deploy_flask_app_rds_info: "{{ rds_result }}" - - - name: Delete temporary ssh private key file - ansible.builtin.file: - state: absent - path: "{{ ssh_key_file_path }}" - when: ssh_key_file_path is defined - ignore_errors: true + deploy_flask_app_bastion_instance_id: "{{ vm_result.instance_ids.0 }}" + deploy_flask_app_rds_host: "{{ rds_result.instances.0.endpoint.address }}" + deploy_flask_app_rds_dbname: "{{ rds_result.instances.0.db_name }}" diff --git a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index 07134b78..3801d0b0 100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -64,7 +64,6 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ primary_private_subnet.subnets[0].id }}" deploy_flask_app_vpc_id: "{{ primary_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ primary_vm_result }}" @@ -103,7 +102,6 @@ ansible.builtin.include_role: name: cloud.aws_ops.deploy_flask_app vars: - deploy_flask_app_bastion_ssh_private_key: "{{ ssh_key_file_path }}" deploy_flask_app_private_subnet_id: "{{ 
replica_private_subnet.subnets[0].id }}" deploy_flask_app_vpc_id: "{{ replica_vpc.vpcs[0].id }}" deploy_flask_app_vm_info: "{{ replica_vm_result }}" diff --git a/roles/deploy_flask_app/README.md b/roles/deploy_flask_app/README.md index 1e1638e2..2a54087a 100644 --- a/roles/deploy_flask_app/README.md +++ b/roles/deploy_flask_app/README.md @@ -25,21 +25,18 @@ Role Variables * **deploy_flask_app_private_subnet_id** (str): Private subnet id of the bastion host * **deploy_flask_app_vpc_id** (str): vpc id for the host. -* **deploy_flask_app_rds_info** (dict): A dict of information for the backend RDS. This dict has the output of amazon.aws.rds_instance_info mode. +* **deploy_flask_app_rds_host** (str): The RDS endpoint address. +* **deploy_flask_app_rds_dbname** (str): The RDS database name. * **deploy_flask_app_rds_master_username** (str): Username for the RDS instance. * **deploy_flask_app_rds_master_password** (str): password for the RDS instance. -* **deploy_flask_app_vm_info** (dict): A dict of information for the vm to use. This dict has the output of amazon.aws.ec2_instance_info module. ## variables needed for the deployment # Bastion host -* **deploy_flask_app_bastion_host_name** (str): Name for the EC2 instance. * **deploy_flask_app_bastion_host_username** (str): Username for the bastion host SSH user. -* **deploy_flask_app_sshkey_pair_name** (str): Name for the EC2 key pair. -* **deploy_flask_app_bastion_ssh_private_key** (path): The path to the ssh private key file to use to connect to the bastion host. +* **deploy_flask_app_bastion_instance_id** (str): The instance id of the virtual machine used as bastion. +* **deploy_flask_app_bastion_ssh_private_key_path** (path): The path to the ssh private key file to use to connect to the bastion host. * **deploy_flask_app_number_of_workers** (int): Number of instances to create. -* **deploy_flask_app_workers_instance_type** (str): RC2 instance type for workers. 
-* **deploy_flask_app_workers_user_name** (str): Username for the workers. # App * **deploy_flask_app_listening_port** (int): Load balancer port. diff --git a/roles/deploy_flask_app/meta/argument_specs.yml b/roles/deploy_flask_app/meta/argument_specs.yml index 2ff18b6f..297c0741 100644 --- a/roles/deploy_flask_app/meta/argument_specs.yml +++ b/roles/deploy_flask_app/meta/argument_specs.yml @@ -8,11 +8,12 @@ argument_specs: description: Username for the bastion host SSH user. type: str required: True - deploy_flask_app_bastion_host_name: - description: Name for the EC2 instance. + deploy_flask_app_bastion_instance_id: + description: The instance Id of the EC2 bastion virtual machine. type: str required: True - deploy_flask_app_bastion_ssh_private_key: + version_added: 2.1.0 + deploy_flask_app_bastion_ssh_private_key_path: description: The path to ssh private key file to use to connect to the bastion host. type: path required: True @@ -25,13 +26,13 @@ argument_specs: description: vpc id for the host. type: str required: True - deploy_flask_app_sshkey_pair_name: - description: Name for the EC2 key pair. + deploy_flask_app_rds_host: + description: The RDS endpoint address. type: str required: True - deploy_flask_app_rds_info: - description: A dict of information for the backend RDS. This dict has the output of amazon.aws.rds_instance_info module. - type: dict + deploy_flask_app_rds_dbname: + description: The RDS Database name. + type: str required: True deploy_flask_app_rds_master_username: description: Master username of the RDS backend. @@ -41,10 +42,6 @@ argument_specs: description: Master password of the RDS backend. type: str required: True - deploy_flask_app_vm_info: - description: A dict of information for the vm to use. This dict has the output of amazon.aws.ec2_instance_info module. - type: dict - required: True deploy_flask_app_number_of_workers: description: Number of instances to create. 
type: int diff --git a/roles/deploy_flask_app/tasks/main.yaml b/roles/deploy_flask_app/tasks/main.yaml index 08a5a65e..6d796351 100644 --- a/roles/deploy_flask_app/tasks/main.yaml +++ b/roles/deploy_flask_app/tasks/main.yaml @@ -1,15 +1,5 @@ --- -- name: Deploy flask app. - vars: - deploy_flask_app__resource_prefix: "{{ deploy_flask_app_vm_info.instances.0.public_dns_name | split('.') | first }}" - deploy_flask_app__group_id: "{{ deploy_flask_app_vm_info.instances.0.security_groups[0].group_id }}" - deploy_flask_app__vm_image_id: "{{ deploy_flask_app_vm_info.instances.0.image_id }}" - deploy_flask_app__bastion_public_ip: "{{ deploy_flask_app_vm_info.instances.0.public_ip_address }}" - deploy_flask_app__bastion_private_ip: "{{ deploy_flask_app_vm_info.instances.0.private_ip_address }}" - deploy_flask_app__public_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" - deploy_flask_app__private_subnet_id: "{{ deploy_flask_app_vm_info.instances.0.subnet_id }}" - deploy_flask_app__rds_host: "{{ deploy_flask_app_rds_info.instances.0.endpoint.address }}" - deploy_flask_app__rds_dbname: "{{ deploy_flask_app_rds_info.instances.0.db_name }}" +- name: Deploy flask application block: - name: Create infrastructure - workers and load balancer ansible.builtin.include_tasks: setup_infra.yaml diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index 7198c488..b36113a2 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -1,5 +1,44 @@ --- -- name: Create Cloud Resources (workers, load balancer, etc) +- name: Describe bastion instance + amazon.aws.ec2_instance_info: + instance_ids: + - "{{ deploy_flask_app_bastion_instance_id }}" + register: bastion_info + +- name: Set common variables to be used later + ansible.builtin.set_fact: + deploy_flask_app__resource_prefix: "{{ bastion_info.instances.0.public_dns_name | split('.') | first }}" + 
deploy_flask_app__group_id: "{{ bastion_info.instances.0.security_groups[0].group_id }}" + deploy_flask_app__vm_image_id: "{{ bastion_info.instances.0.image_id }}" + deploy_flask_app__bastion_public_ip: "{{ bastion_info.instances.0.public_ip_address }}" + deploy_flask_app__public_subnet_id: "{{ bastion_info.instances.0.subnet_id }}" + +- name: Add bastion host to inventory + ansible.builtin.add_host: + hostname: bastion + ansible_ssh_user: "{{ deploy_flask_app_bastion_host_username }}" + ansible_host: "{{ deploy_flask_app__bastion_public_ip }}" + ansible_python_interpreter: auto + ansible_ssh_common_args: '-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_bastion_ssh_private_key_path }}' + +- name: Set variable for key pair + ansible.builtin.set_fact: + deploy_flask_app__workers_keypair_name: "{{ deploy_flask_app__resource_prefix }}-key" + +- name: Create key pair to connect to the workers + amazon.aws.ec2_key: + name: "{{ deploy_flask_app__workers_keypair_name }}" + register: keypair_result + +- name: Save key pair content into file on bastion host + ansible.builtin.copy: + content: "{{ keypair_result.key.private_key }}" + dest: "{{ deploy_flask_app_workers_ssh_private_key }}" + mode: 0600 + when: keypair_result is changed + delegate_to: bastion + +- name: Create workers instances block: - name: Set variables ansible.builtin.set_fact: @@ -9,7 +48,7 @@ amazon.aws.ec2_instance_info: filters: network-interface.subnet-id: "{{ deploy_flask_app_private_subnet_id }}" - key-name: "{{ deploy_flask_app_sshkey_pair_name }}" + key-name: "{{ deploy_flask_app__workers_keypair_name }}" image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running register: deploy_flask_app_vms @@ -23,7 +62,7 @@ name: "{{ deploy_flask_app_instance_name }}" instance_type: "{{ deploy_flask_app_workers_instance_type }}" image_id: "{{ deploy_flask_app__vm_image_id }}" - key_name: "{{ deploy_flask_app_sshkey_pair_name }}" + key_name: "{{ 
deploy_flask_app__workers_keypair_name }}" subnet_id: "{{ deploy_flask_app_private_subnet_id }}" network: assign_public_ip: false @@ -42,7 +81,7 @@ amazon.aws.ec2_instance_info: filters: network-interface.subnet-id: "{{ deploy_flask_app_private_subnet_id }}" - key-name: "{{ deploy_flask_app_sshkey_pair_name }}" + key-name: "{{ deploy_flask_app__workers_keypair_name }}" image-id: "{{ deploy_flask_app__vm_image_id }}" instance-state-name: running register: deploy_flask_app_vms diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml b/roles/deploy_flask_app/tasks/start_containers.yaml index 1988f83c..420eb16e 100644 --- a/roles/deploy_flask_app/tasks/start_containers.yaml +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -1,36 +1,4 @@ --- -# Configure local ssh config -- name: Create ssh configuration files - ansible.builtin.file: - state: "{{ item.state }}" - path: "{{ item.path }}" - mode: '0755' - with_items: - - state: directory - path: "~/.ssh" - - state: touch - path: "~/.ssh/config" - -- name: Update local .ssh/config - ansible.builtin.blockinfile: - state: present - insertafter: EOF - dest: "~/.ssh/config" - content: "{{ lookup('template', 'local_ssh_config.j2') }}" - -- name: Add bastion host into inventory - ansible.builtin.add_host: - hostname: bastion - ansible_python_interpreter: auto - ansible_host_name: bastion - -- name: Update local .ssh/config - ansible.builtin.blockinfile: - state: present - insertafter: EOF - dest: "~/.ssh/config" - content: "{{ lookup('template', 'local_ssh_config.j2') }}" - - name: Configure bastion delegate_to: bastion block: @@ -52,12 +20,6 @@ dest: "~/.ssh/config" content: "{{ lookup('template', 'bastion_ssh_config.j2') }}" - - name: Copy remote ssh private key file into bastion - ansible.builtin.copy: - src: "{{ deploy_flask_app_bastion_ssh_private_key }}" - dest: "{{ deploy_flask_app_workers_ssh_private_key }}" - mode: 0400 - - name: Generate workers inventory file ansible.builtin.copy: content: "{{ 
lookup('template', 'workers_inventory.yaml.j2') }}" diff --git a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 index af65d638..c1fe7d9e 100644 --- a/roles/deploy_flask_app/templates/deploy_app.yaml.j2 +++ b/roles/deploy_flask_app/templates/deploy_app.yaml.j2 @@ -32,8 +32,8 @@ podman run --rm -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" -e FLASK_ENV="{{ deploy_flask_app_config.env }}" - -e DATABASE_HOST="{{ deploy_flask_app__rds_host }}" - -e DATABASE_INSTANCE="{{ deploy_flask_app__rds_dbname }}" + -e DATABASE_HOST="{{ deploy_flask_app_rds_host }}" + -e DATABASE_INSTANCE="{{ deploy_flask_app_rds_dbname }}" -e DATABASE_USER="{{ deploy_flask_app_rds_master_username }}" -e DATABASE_PASSWORD="{{ deploy_flask_app_rds_master_password }}" -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" @@ -50,8 +50,8 @@ --rm -e FLASK_APP="{{ deploy_flask_app_config.app_dir }}" -e FLASK_ENV="{{ deploy_flask_app_config.env }}" - -e DATABASE_HOST="{{ deploy_flask_app__rds_host }}" - -e DATABASE_INSTANCE="{{ deploy_flask_app__rds_dbname }}" + -e DATABASE_HOST="{{ deploy_flask_app_rds_host }}" + -e DATABASE_INSTANCE="{{ deploy_flask_app_rds_dbname }}" -e DATABASE_USER="{{ deploy_flask_app_rds_master_username }}" -e DATABASE_PASSWORD="{{ deploy_flask_app_rds_master_password }}" -e ADMIN_USER="{{ deploy_flask_app_config.admin_user }}" diff --git a/roles/deploy_flask_app/templates/local_ssh_config.j2 b/roles/deploy_flask_app/templates/local_ssh_config.j2 deleted file mode 100644 index e039f2ff..00000000 --- a/roles/deploy_flask_app/templates/local_ssh_config.j2 +++ /dev/null @@ -1,6 +0,0 @@ -Host bastion - HostName {{ deploy_flask_app__bastion_public_ip }} - User {{ deploy_flask_app_bastion_host_username }} - IdentityFile {{ deploy_flask_app_bastion_ssh_private_key }} - StrictHostKeyChecking no - UserKnownHostsFile /dev/null diff --git 
a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/files/ec2-trust-policy.json b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/files/ec2-trust-policy.json index 63d22eae..840205bd 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/files/ec2-trust-policy.json +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/files/ec2-trust-policy.json @@ -10,4 +10,4 @@ "Action": "sts:AssumeRole" } ] - } +} diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml index 24cc73f4..69202c6b 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml @@ -8,9 +8,10 @@ vars: deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" - deploy_flask_app_vm_info: "{{ vm_result }}" - deploy_flask_app_rds_info: "{{ rds_result }}" - deploy_flask_app_bastion_ssh_private_key: "{{ deploy_flask_app_bastion_rsa_key_dir }}/id_rsa" + deploy_flask_app_bastion_instance_id: "{{ vm_result.instance_ids.0 }}" + deploy_flask_app_rds_host: "{{ rds_result.instances.0.endpoint.address }}" + deploy_flask_app_rds_dbname: "{{ rds_result.instances.0.db_name }}" + deploy_flask_app_bastion_ssh_private_key_path: "{{ deploy_flask_app_bastion_rsa_key_dir }}/id_rsa" - name: Check that a page returns successfully ansible.builtin.uri: From d6ea9877ed851a5811afb3a640e120d78abc2229 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 31 Jan 2024 15:51:56 +0100 Subject: [PATCH 44/51] add missing deletion steps --- playbooks/webapp/tasks/delete.yaml | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git 
a/playbooks/webapp/tasks/delete.yaml b/playbooks/webapp/tasks/delete.yaml index 9d5efd0f..5e8f111e 100644 --- a/playbooks/webapp/tasks/delete.yaml +++ b/playbooks/webapp/tasks/delete.yaml @@ -8,12 +8,6 @@ region: "{{ region | default(aws_region) }}" block: - - name: Delete S3 bucket - amazon.aws.s3_bucket: - name: "{{ bucket_name }}" - state: absent - force: true - - name: Get vpc information amazon.aws.ec2_vpc_net_info: filters: @@ -65,10 +59,11 @@ skip_final_snapshot: true wait: true - - name: Delete key pair to connect to the bastion VM + - name: Delete keys pairs created for the deployment amazon.aws.ec2_key: - name: "{{ deploy_flask_app_sshkey_pair_name }}" + name: "{{ item }}" state: absent + with_items: "{{ ec2_instances.instances | map(attribute='key_name') | unique | list }}" - name: Delete RDS subnet group amazon.aws.rds_subnet_group: From 4c4e49815fd54802ee71a0a825f63d24f9189891 Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 31 Jan 2024 16:00:00 +0100 Subject: [PATCH 45/51] Wait for the RDS instance to be available --- .../roles/run_deploy_flask_app/tasks/create.yaml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml index c25431f3..6489f236 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/create.yaml @@ -140,7 +140,8 @@ db_subnet_group_name: "{{ rds_subnet_group_name }}" vpc_security_group_ids: - "{{ rds_sg.group_id }}" - wait: false + wait: true + register: rds_result # Create key pair to connect to the VM - name: Create directory to generate keys in @@ -211,8 +212,3 @@ subnet: private route: nat-gateway state: present - - - name: Get RDS instance info - amazon.aws.rds_instance_info: - 
db_instance_identifier: "{{ rds_identifier }}" - register: rds_result From a9813d0eca91b73eabc0c2f7230f1cc08aabf77d Mon Sep 17 00:00:00 2001 From: abikouo Date: Wed, 31 Jan 2024 17:41:20 +0100 Subject: [PATCH 46/51] RDS result update --- .../roles/run_deploy_flask_app/tasks/validate_create.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml index 69202c6b..da946961 100644 --- a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/validate_create.yaml @@ -9,8 +9,8 @@ deploy_flask_app_private_subnet_id: "{{ private_subnet.subnet.id }}" deploy_flask_app_vpc_id: "{{ vpc.vpc.id }}" deploy_flask_app_bastion_instance_id: "{{ vm_result.instance_ids.0 }}" - deploy_flask_app_rds_host: "{{ rds_result.instances.0.endpoint.address }}" - deploy_flask_app_rds_dbname: "{{ rds_result.instances.0.db_name }}" + deploy_flask_app_rds_host: "{{ rds_result.endpoint.address }}" + deploy_flask_app_rds_dbname: "{{ rds_result.db_name }}" deploy_flask_app_bastion_ssh_private_key_path: "{{ deploy_flask_app_bastion_rsa_key_dir }}/id_rsa" - name: Check that a page returns successfully From f89c274b4abe2840c98db5ea105e6af29eb538b2 Mon Sep 17 00:00:00 2001 From: abikouo Date: Thu, 1 Feb 2024 12:24:42 +0100 Subject: [PATCH 47/51] delete additional key pairs --- .../roles/run_deploy_flask_app/tasks/delete.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/delete.yaml b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/delete.yaml index cd99f8e0..73f1b384 100644 --- 
a/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/delete.yaml +++ b/tests/integration/targets/test_deploy_flask_app/roles/run_deploy_flask_app/tasks/delete.yaml @@ -44,8 +44,9 @@ # Delete EC2 key pair - name: Delete EC2 key pair amazon.aws.ec2_key: - name: "{{ deploy_flask_app_sshkey_pair_name }}" + name: "{{ item }}" state: absent + with_items: "{{ ec2_instances.instances | map(attribute='key_name') | unique | list }}" # Delete RDS information - name: Delete RDS instance From dbb3c18d2c97882af80afe26f3cd537b4936b3ff Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 6 Feb 2024 11:00:24 +0100 Subject: [PATCH 48/51] Fixing issue with webapp_ha_aurora playbook --- .../webapp/tasks/add_route53_records.yaml | 2 + playbooks/webapp/tasks/create.yaml | 1 + .../tasks/create_aurora_db_cluster.yaml | 12 - .../webapp/tasks/deploy_app_into_region.yaml | 43 ++++ playbooks/webapp/vars/main.yaml | 2 +- playbooks/webapp/webapp_ha_aurora.yaml | 211 ++++++++---------- roles/create_rds_global_cluster/README.md | 6 +- .../meta/argument_specs.yml | 6 +- .../tasks/create.yml | 15 ++ roles/deploy_flask_app/tasks/setup_infra.yaml | 13 +- .../tasks/start_containers.yaml | 2 +- 11 files changed, 174 insertions(+), 139 deletions(-) create mode 100644 playbooks/webapp/tasks/deploy_app_into_region.yaml diff --git a/playbooks/webapp/tasks/add_route53_records.yaml b/playbooks/webapp/tasks/add_route53_records.yaml index 70935d5f..3cd2d2bc 100644 --- a/playbooks/webapp/tasks/add_route53_records.yaml +++ b/playbooks/webapp/tasks/add_route53_records.yaml @@ -43,6 +43,7 @@ failover: "PRIMARY" health_check: "{{ healthchk_primary_result.health_check.id }}" alias_hosted_zone_id: "{{ primary_lb.elb.hosted_zone_id }}" + overwrite: true register: alias_record_primary_result - name: Add an alias record that points to an aws ELB in the replica region @@ -57,6 +58,7 @@ failover: "SECONDARY" health_check: "{{ healthchk_replica_result.health_check.id }}" alias_hosted_zone_id: "{{ 
replica_lb.elb.hosted_zone_id }}" + overwrite: true register: alias_record_replica_result - name: Pause for 30 secs for the alias records to be active diff --git a/playbooks/webapp/tasks/create.yaml b/playbooks/webapp/tasks/create.yaml index 865d978a..68f459bb 100644 --- a/playbooks/webapp/tasks/create.yaml +++ b/playbooks/webapp/tasks/create.yaml @@ -205,6 +205,7 @@ - name: Set variable for SSH private key file path ansible.builtin.set_fact: deploy_flask_app_bastion_ssh_private_key_path: "~/.{{ resource_prefix }}_id_rsa" + when: deploy_flask_app_bastion_ssh_private_key_path is undefined - name: Create key pair to connect to the workers amazon.aws.ec2_key: diff --git a/playbooks/webapp/tasks/create_aurora_db_cluster.yaml b/playbooks/webapp/tasks/create_aurora_db_cluster.yaml index e92c4b9f..6c1acddf 100644 --- a/playbooks/webapp/tasks/create_aurora_db_cluster.yaml +++ b/playbooks/webapp/tasks/create_aurora_db_cluster.yaml @@ -44,12 +44,6 @@ create_rds_global_cluster_replica_cluster_vpc_security_group_ids: - "{{ rds_replica_sg.security_groups[0].group_id }}" - - name: Get primary instance info - amazon.aws.rds_instance_info: - db_instance_identifier: "{{ rds_primary_cluster_instance_name }}" - region: "{{ rds_primary_cluster_region }}" - register: primary_instance_info_result - - name: Get primary cluster info amazon.aws.rds_cluster_info: db_cluster_identifier: "{{ rds_primary_cluster_name }}" @@ -62,12 +56,6 @@ region: "{{ rds_replica_cluster_region }}" register: replica_cluster_info_result - - name: Get replica instance info - amazon.aws.rds_instance_info: - db_instance_identifier: "{{ rds_replica_cluster_instance_name }}" - region: "{{ rds_replica_cluster_region }}" - register: replica_instance_info_result - - name: Get global db info amazon.aws.rds_global_cluster_info: global_cluster_identifier: "{{ rds_global_cluster_name }}" diff --git a/playbooks/webapp/tasks/deploy_app_into_region.yaml b/playbooks/webapp/tasks/deploy_app_into_region.yaml new file mode 100644 
index 00000000..8aad0dd1 --- /dev/null +++ b/playbooks/webapp/tasks/deploy_app_into_region.yaml @@ -0,0 +1,43 @@ +--- +- name: Deploy application into regions + module_defaults: + group/aws: + aws_access_key: "{{ aws_access_key | default(omit) }}" + aws_secret_key: "{{ aws_secret_key | default(omit) }}" + security_token: "{{ security_token | default(omit) }}" + region: "{{ region }}" + block: + - name: Get VPC info + amazon.aws.ec2_vpc_net_info: + filters: + "tag:Name": "{{ vpc_name }}" + register: vpc_result + + - name: Get Private Subnet for Workers + amazon.aws.ec2_vpc_subnet_info: + filters: + vpc-id: "{{ vpc_result.vpcs[0].id }}" + cidr: "{{ subnet_cidr[1] }}" + register: _subnets + + - name: Get VM info + amazon.aws.ec2_instance_info: + filters: + "tag:Name": "{{ deploy_flask_app_bastion_host_name }}" + instance-state-name: ["running"] + register: vm_result + + - name: Get RDS instance info + amazon.aws.rds_instance_info: + db_instance_identifier: "{{ rds_cluster_name }}" + register: rds_result + + - name: Deploy app into region + ansible.builtin.include_role: + name: cloud.aws_ops.deploy_flask_app + vars: + deploy_flask_app_private_subnet_id: "{{ _subnets.subnets[0].id }}" + deploy_flask_app_vpc_id: "{{ vpc_result.vpcs[0].id }}" + deploy_flask_app_bastion_instance_id: "{{ vm_result.instances.0.instance_id }}" + deploy_flask_app_rds_host: "{{ rds_result.instances.0.endpoint.address }}" + deploy_flask_app_rds_dbname: "{{ rds_result.instances.0.db_name }}" diff --git a/playbooks/webapp/vars/main.yaml b/playbooks/webapp/vars/main.yaml index e62cfb73..21c893bf 100644 --- a/playbooks/webapp/vars/main.yaml +++ b/playbooks/webapp/vars/main.yaml @@ -59,7 +59,7 @@ rds_replica_cluster_region: us-east-2 rds_replica_cluster_instance_name: "{{ resource_prefix }}-replica-instance" # vars for route53 records -route53_subdomain: "flaskapp" +route53_subdomain: "ansiblecloud.xyz" # A bucket to save RSA key into bucket_name: "bucket-rsa-{{ resource_prefix }}" diff --git 
a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index 3801d0b0..4576d9bc 100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -1,111 +1,86 @@ --- -- name: Webapp HA +- name: Configure inventory for High availability Aurora cluster hosts: localhost gather_facts: false + vars_files: - vars/main.yaml - module_defaults: - group/aws: - aws_access_key: "{{ aws_access_key | default(omit) }}" - aws_secret_key: "{{ aws_secret_key | default(omit) }}" - security_token: "{{ security_token | default(omit) }}" - region: "{{ region | default(aws_region) }}" + tasks: + - name: Add different hosts + ansible.builtin.add_host: + groups: + - aurora + name: "aurora_{{ item.region }}" + ansible_connection: local + region: "{{ item.region }}" + ansible_python_interpreter: "{{ ansible_python_interpreter }}" + deploy_flask_app_bastion_ssh_private_key_path: "~/.{{ resource_prefix }}{{ item.region }}_id_rsa" + rds_cluster_name: "{{ item.rds_cluster_name }}" + with_items: + - region: "{{ rds_primary_cluster_region }}" + rds_cluster_name: "{{ rds_primary_cluster_instance_name }}" + - region: "{{ rds_replica_cluster_region }}" + rds_cluster_name: "{{ rds_replica_cluster_instance_name }}" + +- name: Webapp HA + hosts: aurora + gather_facts: false + strategy: free + + vars_files: + - vars/main.yaml tasks: - - name: Create resources and Deploy App + - name: Create resources in region + ansible.builtin.include_tasks: tasks/create.yaml when: operation == "create" - block: - - name: Create resources in primary region - ansible.builtin.include_tasks: tasks/create.yaml - vars: - region: "{{ creation_region }}" - rds_instance_class: db.r5.large - rds_engine: aurora-postgresql - loop: - - "{{ rds_primary_cluster_region }}" - - "{{ rds_replica_cluster_region }}" - loop_control: - loop_var: creation_region - - - name: Create Aurora db cluster - ansible.builtin.import_tasks: tasks/create_aurora_db_cluster.yaml - vars: - 
rds_instance_class: db.r5.large - rds_engine: aurora-postgresql + vars: + rds_instance_class: db.r5.large + rds_engine: aurora-postgresql - # ================= Deploy App in the primary region ================= +- name: Create Aurora db cluster + hosts: localhost + vars_files: + - vars/main.yaml - - name: Get VPC info from primary region - amazon.aws.ec2_vpc_net_info: - filters: - "tag:Name": "{{ vpc_name }}" - region: "{{ rds_primary_cluster_region }}" - register: primary_vpc + tasks: + - name: Create Aurora db cluster + ansible.builtin.import_tasks: tasks/create_aurora_db_cluster.yaml + when: operation == "create" + vars: + rds_instance_class: db.r5.large + rds_engine: aurora-postgresql - - name: Get primary private subnet for workers - amazon.aws.ec2_vpc_subnet_info: - filters: - vpc-id: "{{ primary_vpc.vpcs[0].id }}" - cidr: "{{ subnet_cidr[1] }}" - region: "{{ rds_primary_cluster_region }}" - register: primary_private_subnet +- name: Deploy application into regions + hosts: localhost + gather_facts: false - - name: Get VM info in the primary region - amazon.aws.ec2_instance_info: - filters: - "tag:Name": "{{ deploy_flask_app_bastion_host_name }}" - instance-state-name: [ "running"] - region: "{{ rds_primary_cluster_region }}" - register: primary_vm_result + vars_files: + - vars/main.yaml - - name: Deploy app in primary region - ansible.builtin.include_role: - name: cloud.aws_ops.deploy_flask_app + tasks: + - name: Deplou application and add Route53 records + when: operation == "create" + block: + - name: Deploy application into primary region + ansible.builtin.import_tasks: tasks/deploy_app_into_region.yaml vars: - deploy_flask_app_private_subnet_id: "{{ primary_private_subnet.subnets[0].id }}" - deploy_flask_app_vpc_id: "{{ primary_vpc.vpcs[0].id }}" - deploy_flask_app_vm_info: "{{ primary_vm_result }}" - deploy_flask_app_rds_info: "{{ primary_instance_info_result }}" + region: "{{ rds_primary_cluster_region }}" + rds_cluster_name: "{{ 
rds_primary_cluster_instance_name }}" + deploy_flask_app_bastion_ssh_private_key_path: "~/.{{ resource_prefix }}{{ rds_primary_cluster_region }}_id_rsa" - name: Get load balancer name from the primary region ansible.builtin.set_fact: primary_lb: "{{ deploy_flask_app_lb_result }}" - # ================= Deploy App in the replica region ================= - - - name: Get VPC info from replica region - amazon.aws.ec2_vpc_net_info: - filters: - "tag:Name": "{{ vpc_name }}" - region: "{{ rds_replica_cluster_region }}" - register: replica_vpc - - - name: Get VM info in the replica region - amazon.aws.ec2_instance_info: - filters: - "tag:Name": "{{ deploy_flask_app_bastion_host_name }}" - instance-state-name: [ "running"] - region: "{{ rds_replica_cluster_region }}" - register: replica_vm_result - - - name: Get replica private subnet for workers - amazon.aws.ec2_vpc_subnet_info: - filters: - vpc-id: "{{ replica_vpc.vpcs[0].id }}" - cidr: "{{ subnet_cidr[1] }}" - region: "{{ rds_replica_cluster_region }}" - register: replica_private_subnet - - - name: Deploy app in replica region - ansible.builtin.include_role: - name: cloud.aws_ops.deploy_flask_app + - name: Deploy application into replica region + ansible.builtin.import_tasks: tasks/deploy_app_into_region.yaml vars: - deploy_flask_app_private_subnet_id: "{{ replica_private_subnet.subnets[0].id }}" - deploy_flask_app_vpc_id: "{{ replica_vpc.vpcs[0].id }}" - deploy_flask_app_vm_info: "{{ replica_vm_result }}" - deploy_flask_app_rds_info: "{{ replica_instance_info_result }}" + region: "{{ rds_replica_cluster_region }}" + rds_cluster_name: "{{ rds_replica_cluster_instance_name }}" + deploy_flask_app_bastion_ssh_private_key_path: "~/.{{ resource_prefix }}{{ rds_replica_cluster_region }}_id_rsa" - name: Get load balancer name from the replica region ansible.builtin.set_fact: @@ -114,34 +89,44 @@ - name: Add Route53 configurations ansible.builtin.include_tasks: tasks/add_route53_records.yaml - # 
================================================================================ +# ================================================================================ + +- name: Delete Route53 records and Aurora cluster + hosts: localhost + gather_facts: false + + vars_files: + - vars/main.yaml - - name: Delete resources + tasks: + - name: Delete Route 53 records and health checks + ansible.builtin.import_tasks: tasks/delete_route53_records.yaml when: operation == "delete" - block: - - name: Delete Route 53 records and health checks - ansible.builtin.import_tasks: tasks/delete_route53_records.yaml + - name: Delete Aurora DB + ansible.builtin.include_role: + name: cloud.aws_ops.create_rds_global_cluster + vars: + create_rds_global_cluster_operation: delete + create_rds_global_cluster_global_cluster_name: "{{ rds_global_cluster_name }}" + create_rds_global_cluster_primary_cluster_name: "{{ rds_primary_cluster_name }}" + create_rds_global_cluster_primary_cluster_region: "{{ rds_primary_cluster_region }}" + create_rds_global_cluster_primary_cluster_instance_name: "{{ rds_primary_cluster_instance_name }}" + create_rds_global_cluster_replica_cluster_name: "{{ rds_replica_cluster_name }}" + create_rds_global_cluster_replica_cluster_region: "{{ rds_replica_cluster_region }}" + create_rds_global_cluster_replica_cluster_instance_name: "{{ rds_replica_cluster_instance_name }}" + +- name: Delete EC2 resources + hosts: aurora + gather_facts: false + strategy: free - - name: Delete Aurora DB - ansible.builtin.include_role: - name: cloud.aws_ops.create_rds_global_cluster - vars: - create_rds_global_cluster_operation: delete - create_rds_global_cluster_global_cluster_name: "{{ rds_global_cluster_name }}" - create_rds_global_cluster_primary_cluster_name: "{{ rds_primary_cluster_name }}" - create_rds_global_cluster_primary_cluster_region: "{{ rds_primary_cluster_region }}" - create_rds_global_cluster_primary_cluster_instance_name: "{{ rds_primary_cluster_instance_name }}" - 
create_rds_global_cluster_replica_cluster_name: "{{ rds_replica_cluster_name }}" - create_rds_global_cluster_replica_cluster_region: "{{ rds_replica_cluster_region }}" - create_rds_global_cluster_replica_cluster_instance_name: "{{ rds_replica_cluster_instance_name }}" - - - name: Delete all resources - ansible.builtin.include_tasks: tasks/delete.yaml - vars: - region: "{{ deletion_region }}" - loop: - - "{{ rds_primary_cluster_region }}" - - "{{ rds_replica_cluster_region }}" - loop_control: - loop_var: deletion_region + vars_files: + - vars/main.yaml + + tasks: + - name: Delete all resources + ansible.builtin.include_tasks: tasks/delete.yaml + when: operation == "delete" + vars: + rds_identifier: "{{ rds_cluster_name }}" diff --git a/roles/create_rds_global_cluster/README.md b/roles/create_rds_global_cluster/README.md index 51f8202b..fbaa88f4 100644 --- a/roles/create_rds_global_cluster/README.md +++ b/roles/create_rds_global_cluster/README.md @@ -32,9 +32,9 @@ Role Variables - **create_rds_global_cluster_global_cluster_name** - Name of the Amazon Aurora global cluster. **required** - **create_rds_global_cluster_engine** - Engine of the Amazon Aurora global and rds clusters. Default is aurora-postgresql. - **create_rds_global_cluster_engine_version** - Engine version of the Amazon Aurora global and rds clusters. -- **create_rds_global_cluster_instance_class** - Instance class of instance in primary and replica cluster. **required** -- **create_rds_global_cluster_master_username** - Username of the rds clusters master user. **required** -- **create_rds_global_cluster_master_user_password** - Password of the rds clusters master user. **required** +- **create_rds_global_cluster_instance_class** - Instance class of instance in primary and replica cluster. **Required** when __create_rds_global_cluster_operation__ is set to __create__. +- **create_rds_global_cluster_master_username** - Username of the rds clusters master user. 
**Required** when __create_rds_global_cluster_operation__ is set to __create__. +- **create_rds_global_cluster_master_user_password** - Password of the rds clusters master user. **Required** when __create_rds_global_cluster_operation__ is set to __create__. **Primary cluster variables** - **create_rds_global_cluster_primary_cluster_name** - Name of the primary cluster. Default is $create_rds_global_cluster_global_cluster_name. diff --git a/roles/create_rds_global_cluster/meta/argument_specs.yml b/roles/create_rds_global_cluster/meta/argument_specs.yml index 15046a2f..d95cb6d0 100644 --- a/roles/create_rds_global_cluster/meta/argument_specs.yml +++ b/roles/create_rds_global_cluster/meta/argument_specs.yml @@ -20,15 +20,15 @@ argument_specs: create_rds_global_cluster_instance_class: description: - Instance class of instance in primary and replica cluster. - required: true + - Required when I(create_rds_global_cluster_operation=create). create_rds_global_cluster_master_username: description: - Username of the rds clusters master user. - required: true + - Required when I(create_rds_global_cluster_operation=create). create_rds_global_cluster_master_user_password: description: - Password of the rds clusters master user. - required: true + - Required when I(create_rds_global_cluster_operation=create). create_rds_global_cluster_primary_cluster_name: description: - Name of the primary cluster. diff --git a/roles/create_rds_global_cluster/tasks/create.yml b/roles/create_rds_global_cluster/tasks/create.yml index d7f4a171..ae3f82b0 100644 --- a/roles/create_rds_global_cluster/tasks/create.yml +++ b/roles/create_rds_global_cluster/tasks/create.yml @@ -5,6 +5,21 @@ group/amazon.cloud.aws: "{{ aws_setup_credentials__output }}" block: + - name: Fail when 'create_rds_global_cluster_instance_class' is not defined + ansible.builtin.fail: + msg: "'create_rds_global_cluster_instance_class' is required to create the global cluster." 
+ when: create_rds_global_cluster_instance_class is undefined + + - name: Fail when 'create_rds_global_cluster_master_username' is not defined + ansible.builtin.fail: + msg: "'create_rds_global_cluster_master_username' is required to create the global cluster." + when: create_rds_global_cluster_master_username is undefined + + - name: Fail when 'create_rds_global_cluster_master_user_password' is not defined + ansible.builtin.fail: + msg: "'create_rds_global_cluster_master_user_password' is required to create the global cluster." + when: create_rds_global_cluster_master_user_password is undefined + - name: Create rds global database amazon.cloud.rds_global_cluster: global_cluster_identifier: "{{ create_rds_global_cluster_global_cluster_name }}" diff --git a/roles/deploy_flask_app/tasks/setup_infra.yaml b/roles/deploy_flask_app/tasks/setup_infra.yaml index b36113a2..0b52bded 100644 --- a/roles/deploy_flask_app/tasks/setup_infra.yaml +++ b/roles/deploy_flask_app/tasks/setup_infra.yaml @@ -13,18 +13,19 @@ deploy_flask_app__bastion_public_ip: "{{ bastion_info.instances.0.public_ip_address }}" deploy_flask_app__public_subnet_id: "{{ bastion_info.instances.0.subnet_id }}" +- name: Set variable for key pair and bastion hostname + ansible.builtin.set_fact: + deploy_flask_app__workers_keypair_name: "{{ deploy_flask_app__resource_prefix }}-key" + deploy_flask_app__bastion_hostname: "{{ deploy_flask_app__resource_prefix }}-bastion" + - name: Add bastion host to inventory ansible.builtin.add_host: - hostname: bastion + hostname: "{{ deploy_flask_app__bastion_hostname }}" ansible_ssh_user: "{{ deploy_flask_app_bastion_host_username }}" ansible_host: "{{ deploy_flask_app__bastion_public_ip }}" ansible_python_interpreter: auto ansible_ssh_common_args: '-o "UserKnownHostsFile=/dev/null" -o StrictHostKeyChecking=no -i {{ deploy_flask_app_bastion_ssh_private_key_path }}' -- name: Set variable for key pair - ansible.builtin.set_fact: - deploy_flask_app__workers_keypair_name: "{{ 
deploy_flask_app__resource_prefix }}-key" - - name: Create key pair to connect to the workers amazon.aws.ec2_key: name: "{{ deploy_flask_app__workers_keypair_name }}" @@ -36,7 +37,7 @@ dest: "{{ deploy_flask_app_workers_ssh_private_key }}" mode: 0600 when: keypair_result is changed - delegate_to: bastion + delegate_to: "{{ deploy_flask_app__bastion_hostname }}" - name: Create workers instances block: diff --git a/roles/deploy_flask_app/tasks/start_containers.yaml b/roles/deploy_flask_app/tasks/start_containers.yaml index 420eb16e..9d3f1c66 100644 --- a/roles/deploy_flask_app/tasks/start_containers.yaml +++ b/roles/deploy_flask_app/tasks/start_containers.yaml @@ -1,6 +1,6 @@ --- - name: Configure bastion - delegate_to: bastion + delegate_to: "{{ deploy_flask_app__bastion_hostname }}" block: - name: Create ssh configuration files ansible.builtin.file: From ca71f7efc073342043d71f562022645e6db093ce Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 6 Feb 2024 13:42:51 +0100 Subject: [PATCH 49/51] conditional delete for global cluster --- playbooks/webapp/webapp_ha_aurora.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index 4576d9bc..ad0b20ca 100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -106,6 +106,7 @@ - name: Delete Aurora DB ansible.builtin.include_role: name: cloud.aws_ops.create_rds_global_cluster + when: operation == "delete" vars: create_rds_global_cluster_operation: delete create_rds_global_cluster_global_cluster_name: "{{ rds_global_cluster_name }}" From 960b9ebd512ef0aa42fa410bb6749ce7af7f660d Mon Sep 17 00:00:00 2001 From: abikouo Date: Tue, 6 Feb 2024 16:58:25 +0100 Subject: [PATCH 50/51] fix minor issue --- playbooks/webapp/webapp_ha_aurora.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/playbooks/webapp/webapp_ha_aurora.yaml b/playbooks/webapp/webapp_ha_aurora.yaml index ad0b20ca..f030b776 
100644 --- a/playbooks/webapp/webapp_ha_aurora.yaml +++ b/playbooks/webapp/webapp_ha_aurora.yaml @@ -61,7 +61,7 @@ - vars/main.yaml tasks: - - name: Deplou application and add Route53 records + - name: Deploy application and add Route53 records when: operation == "create" block: - name: Deploy application into primary region From 4de99e53b03434979628ad1e7e4e01ddcefeff21 Mon Sep 17 00:00:00 2001 From: Bikouo Aubin <79859644+abikouo@users.noreply.github.com> Date: Fri, 9 Feb 2024 09:26:17 +0100 Subject: [PATCH 51/51] Update playbooks/webapp/vars/main.yaml Co-authored-by: GomathiselviS --- playbooks/webapp/vars/main.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/playbooks/webapp/vars/main.yaml b/playbooks/webapp/vars/main.yaml index 21c893bf..05df567c 100644 --- a/playbooks/webapp/vars/main.yaml +++ b/playbooks/webapp/vars/main.yaml @@ -59,7 +59,8 @@ rds_replica_cluster_region: us-east-2 rds_replica_cluster_instance_name: "{{ resource_prefix }}-replica-instance" # vars for route53 records -route53_subdomain: "ansiblecloud.xyz" +route53_zone_name: "ansiblecloud.xyz" +route53_subdomain: "flaskapp" # A bucket to save RSA key into bucket_name: "bucket-rsa-{{ resource_prefix }}"