From 01d5e93e26d959392d890280583bc9fdf75f4a2f Mon Sep 17 00:00:00 2001
From: Jan Pokorny
Date: Tue, 22 Sep 2020 14:36:04 +0200
Subject: [PATCH] fix: raid volume pre cleanup

Cause: Existing data were not removed from member disks before RAID
volume creation.

Fix: RAID volumes now remove existing data from member disks as needed
before creation.

Signed-off-by: Jan Pokorny
---
 library/blivet.py                   |   9 +++++++--
 tests/tests_raid_volume_cleanup.yml | 102 ++++++++++++++++++++++++++++
 2 files changed, 109 insertions(+), 2 deletions(-)
 create mode 100644 tests/tests_raid_volume_cleanup.yml

diff --git a/library/blivet.py b/library/blivet.py
index 79eb105c..5e03a9d8 100644
--- a/library/blivet.py
+++ b/library/blivet.py
@@ -1002,8 +1002,13 @@ def _create(self):
         if self._device:
             return

-        if safe_mode:
-            raise BlivetAnsibleError("cannot create new RAID in safe mode")
+        for spec in self._volume["disks"]:
+            disk = self._blivet.devicetree.resolve_device(spec)
+            if not disk.isleaf or disk.format.type is not None:
+                if safe_mode and (disk.format.type is not None or disk.format.name != get_format(None).name):
+                    raise BlivetAnsibleError("cannot remove existing formatting and/or devices on disk '%s' in safe mode" % disk.name)
+                else:
+                    self._blivet.devicetree.recursive_remove(disk)

         # begin creating the devices
         members = self._create_raid_members(self._volume["disks"])
diff --git a/tests/tests_raid_volume_cleanup.yml b/tests/tests_raid_volume_cleanup.yml
new file mode 100644
index 00000000..36f25ddd
--- /dev/null
+++ b/tests/tests_raid_volume_cleanup.yml
@@ -0,0 +1,102 @@
+---
+- name: Test RAID cleanup
+  hosts: all
+  become: true
+  vars:
+    storage_safe_mode: false
+    storage_use_partitions: true
+    mount_location1: '/opt/test1'
+    mount_location2: '/opt/test2'
+    volume1_size: '5g'
+    volume2_size: '4g'
+
+  tasks:
+    - name: Run the role
+      include_role:
+        name: linux-system-roles.storage
+
+    - name: Mark tasks to be skipped
+      set_fact:
+        storage_skip_checks:
+          - blivet_available
+          - packages_installed
+          - service_facts
+
+    - name: Get unused disks
+      include_tasks: get_unused_disk.yml
+      vars:
+        max_return: 3
+        disks_needed: 3
+
+    - name: Create two LVM logical volumes under volume group 'foo'
+      include_role:
+        name: linux-system-roles.storage
+      vars:
+        storage_pools:
+          - name: foo
+            disks: "{{ unused_disks }}"
+            volumes:
+              - name: test1
+                size: "{{ volume1_size }}"
+                mount_point: "{{ mount_location1 }}"
+              - name: test2
+                size: "{{ volume2_size }}"
+                mount_point: "{{ mount_location2 }}"
+
+    - name: Enable safe mode
+      set_fact:
+        storage_safe_mode: true
+
+    - name: >-
+        Try to overwrite existing device with raid volume
+        and safe mode on (expect failure)
+      include_tasks: verify-role-failed.yml
+      vars:
+        __storage_failed_regex: cannot remove existing formatting.*in safe mode
+        __storage_failed_msg: >-
+          Unexpected behavior when overwriting existing device with RAID volume
+        __storage_failed_params:
+          storage_volumes:
+            - name: test1
+              type: raid
+              raid_level: "raid1"
+              raid_device_count: 2
+              raid_spare_count: 1
+              disks: "{{ unused_disks }}"
+              mount_point: "{{ mount_location1 }}"
+              state: present
+
+    - name: Disable safe mode
+      set_fact:
+        storage_safe_mode: false
+
+    - name: Create a RAID1 device mounted on "{{ mount_location1 }}"
+      include_role:
+        name: linux-system-roles.storage
+      vars:
+        storage_volumes:
+          - name: test1
+            type: raid
+            raid_level: "raid1"
+            raid_device_count: 2
+            raid_spare_count: 1
+            disks: "{{ unused_disks }}"
+            mount_point: "{{ mount_location1 }}"
+            state: present
+
+    - name: Verify role results
+      include_tasks: verify-role-results.yml
+
+    - name: Cleanup - remove the RAID device created above
+      include_role:
+        name: linux-system-roles.storage
+      vars:
+        storage_volumes:
+          - name: test1
+            type: raid
+            raid_level: "raid1"
+            raid_device_count: 2
+            raid_spare_count: 1
+            disks: "{{ unused_disks }}"
+            mount_point: "{{ mount_location1 }}"
+            state: absent
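
Note for reviewers (not part of the patch): the new pre-create logic in the
blivet.py hunk can be read in isolation as the minimal sketch below. It assumes
blivet's devicetree API (resolve_device(), recursive_remove()) and the helpers
already imported in library/blivet.py (get_format(), BlivetAnsibleError); the
standalone function name _wipe_member_disks is illustrative only and does not
appear in the patch.

    # Illustrative sketch only -- mirrors the hunk above, outside its class.
    # Assumes blivet's devicetree API and the module's existing helpers.
    def _wipe_member_disks(blivet_obj, disk_specs, safe_mode):
        for spec in disk_specs:
            disk = blivet_obj.devicetree.resolve_device(spec)
            # Nothing to do for a bare leaf disk with no recognized formatting.
            if disk.isleaf and disk.format.type is None:
                continue
            # Safe mode refuses to destroy anything except a disk whose only
            # "formatting" is the empty placeholder from get_format(None).
            if safe_mode and (disk.format.type is not None
                              or disk.format.name != get_format(None).name):
                raise BlivetAnsibleError(
                    "cannot remove existing formatting and/or devices "
                    "on disk '%s' in safe mode" % disk.name)
            # Otherwise, tear down any child devices and formatting.
            blivet_obj.devicetree.recursive_remove(disk)

Compared to the removed blanket raise, this only errors out in safe mode when
there is actually something to destroy, so creating a RAID volume on pristine
disks now succeeds even with storage_safe_mode: true, which is exactly the
behavior tests_raid_volume_cleanup.yml exercises.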