From d6bfe5cc0591ae771891c477596684879300c10b Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Tue, 8 Jul 2025 11:01:35 -0600
Subject: [PATCH 01/13] Added return values

---
 plugins/modules/zos_data_set.py | 171 +++++++++++++++++++++++++++++++-
 1 file changed, 167 insertions(+), 4 deletions(-)

diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py
index d03bbb1268..8a17c6d902 100644
--- a/plugins/modules/zos_data_set.py
+++ b/plugins/modules/zos_data_set.py
@@ -801,11 +801,128 @@
   - "222222"
 """
 RETURN = r"""
-names:
+data_sets:
   description: The data set names, including temporary generated data set names, in the order provided to the module.
   returned: always
   type: list
   elements: str
+  contains:
+    name:
+      description: The data set name.
+      type: str
+      returned: always
+    state:
+      description: The final state desired for specified data set.
+      type: str
+      returned: always
+    type:
+      description: The data set type.
+      type: str
+      returned: always
+    space_primary:
+      description: The amount of primary space allocated for the dataset.
+      type: int
+      returned: always
+    space_secondary:
+      description: The amount of secondary space allocated for the dataset.
+      type: int
+      returned: always
+    space_type:
+      description: The unit of measurement used when defining primary and secondary space.
+      type: str
+      returned: always
+    record_format:
+      description: The format of the data set.
+      type: str
+      sample: fb
+      returned: always
+    sms_storage_class:
+      description:
+        - The storage class for the SMS-managed dataset.
+        - Returned empty if the data set was not specified as SMS-managed dataset.
+      type: str
+      returned: always
+    sms_data_class:
+      description:
+        - The data class for an SMS-managed dataset.
+        - Returned empty if the data set was not specified as SMS-managed dataset.
+      type: str
+      returned: always
+    sms_management_class:
+      description:
+        - The management class for an SMS-managed dataset.
+        - Returned empty if the data set was not specified as SMS-managed dataset.
+      type: str
+      returned: always
+    record_length:
+      description: The length, in bytes, of each record in the data set.
+      type: int
+      returned: always
+    block_size:
+      description: The block size used for the data set.
+      type: int
+      returned: always
+    directory_blocks:
+      description:
+        - The number of directory blocks to allocate to the data set.
+      type: int
+      required: false
+      returned: always
+    key_offset:
+      description: The key offset used when creating a KSDS data set.
+      type: int
+      returned: always
+    key_length:
+      description: The key length used when creating a KSDS data set.
+      type: int
+      returned: always
+    empty:
+      description:
+        - I(empty) attribute for Generation Data Groups.
+        - Returned empty if the data set provided was not defined as a GDG.
+      type: bool
+      returned: always
+    extended:
+      description:
+        - I(extended) attribute for Generation Data Groups.
+        - Returned empty if the data set provided was not defined as a GDG.
+      type: bool
+      returned: always
+    fifo:
+      description:
+        - I(fifo) attribute for Generation Data Groups.
+        - Returned empty if the data set provided was not defined as a GDG.
+      type: bool
+      returned: always
+    limit:
+      description:
+        - I(limit) attribute for Generation Data Groups.
+        - Returned empty if the data set provided was not defined as a GDG.
+      type: int
+      returned: always
+    purge:
+      description:
+        - I(purge) attribute for Generation Data Groups.
+        - Returned empty if the data set provided was not defined as a GDG.
+      type: bool
+      returned: always
+    scratch:
+      description:
+        - I(scratch) attribute for Generation Data Groups.
+        - Returned empty if the data set provided was not defined as a GDG.
+      type: bool
+      returned: always
+    volumes:
+      description:
+        - Specifies the name of the volume(s) where the data set is located.
+        - Returned empty if volume was not provided.
+      type: list
+      returned: always
+msg:
+  description: A string with a generic message relayed to the user.
+  returned: always
+  type: str
+  sample: Error while gathering data set information
 """

 from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
@@ -1702,6 +1819,53 @@ def parse_and_validate_args(params):
     }
     return parsed_args

+def build_return_schema( data_set_params):
+    """ Builds return values schema with empty values.
+
+    Parameters
+    ----------
+    data_set_params : dict
+        Dictionary containing all params used in data set creation.
+
+    Returns
+    -------
+    dict
+        Dictionary used to return values at execution finalization.
+    """
+    data_set_schema = {
+            "name": "",
+            "state": "",
+            "type": "",
+            "space_primary": "",
+            "space_secondary": "",
+            "space_type": "",
+            "record_format": "",
+            "sms_storage_class": "",
+            "sms_data_class": "",
+            "sms_management_class": "",
+            "record_length": "",
+            "block_size": "",
+            "directory_blocks": "",
+            "key_offset": "",
+            "key_length": "",
+            "empty": "",
+            "extended": "",
+            "fifo": "",
+            "limit": "",
+            "purge": "",
+            "scratch": "",
+            "volumes": [],
+    }
+
+
+    data_sets = [ data_set_schema.copy() | data_set for data_set in data_set_params ]
+    result = {
+            "data_sets": data_sets,
+            "changed": False,
+            "msg": "",
+            "failed": False
+    }
+    return result

 def run_module():
     """Runs the module.
@@ -1910,7 +2074,8 @@ def run_module():
             module_args['state']['dependencies'] = ['batch']
             params = parse_and_validate_args(module.params)
             data_set_param_list = get_individual_data_set_parameters(params)
-            result["names"] = [d.get("name", "") for d in data_set_param_list]
+            # Build return schema from the data set param list
+            result["names"] = build_return_schema(data_set_param_list)

             for data_set_params in data_set_param_list:
                 # this returns MVSDataSet, Member or GenerationDataGroup
@@ -1925,8 +2090,6 @@ def run_module():
                 result["changed"] = result["changed"] or current_changed
         except Exception as e:
             module.fail_json(msg=repr(e), **result)
-    if module.params.get("replace"):
-        result["changed"] = True
     module.exit_json(**result)

From 5a323c190617e8a928477efdddd1cfdc41bedec4 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Thu, 18 Sep 2025 16:09:18 -0600
Subject: [PATCH 02/13] Added changelog

---
 changelogs/fragments/2206-zos_data_set-interface-update.yml | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 changelogs/fragments/2206-zos_data_set-interface-update.yml

diff --git a/changelogs/fragments/2206-zos_data_set-interface-update.yml b/changelogs/fragments/2206-zos_data_set-interface-update.yml
new file mode 100644
index 0000000000..aa6a71b97f
--- /dev/null
+++ b/changelogs/fragments/2206-zos_data_set-interface-update.yml
@@ -0,0 +1,6 @@
+breaking_changes:
+  - zos_job_query - Return value ``names`` is deprecated.
+    (https://github.com/ansible-collections/ibm_zos_core/pull/2206).
+minor_changes:
+  - zos_data_set - Adds return value ``data_sets`` which contains the attributes of all data sets created, modified or deleted.
+    (https://github.com/ansible-collections/ibm_zos_core/pull/2206)

From 9a273f41d1d3d8a7eab25bfe8283b968b567d06c Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Fri, 26 Sep 2025 15:54:18 -0600
Subject: [PATCH 03/13] Updated changelogs

---
 changelogs/fragments/2206-zos_data_set-interface-update.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/changelogs/fragments/2206-zos_data_set-interface-update.yml b/changelogs/fragments/2206-zos_data_set-interface-update.yml
index aa6a71b97f..c231378687 100644
--- a/changelogs/fragments/2206-zos_data_set-interface-update.yml
+++ b/changelogs/fragments/2206-zos_data_set-interface-update.yml
@@ -1,6 +1,3 @@
-breaking_changes:
-  - zos_job_query - Return value ``names`` is deprecated.
-    (https://github.com/ansible-collections/ibm_zos_core/pull/2206).
 minor_changes:
   - zos_data_set - Adds return value ``data_sets`` which contains the attributes of all data sets created, modified or deleted.
     (https://github.com/ansible-collections/ibm_zos_core/pull/2206)

From 311557e90062be68f1eb42747ab5a54e1a2a09d2 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Tue, 30 Sep 2025 11:39:38 -0600
Subject: [PATCH 04/13] Updated so that we get attributes from zoau data set creation

---
 plugins/module_utils/data_set.py | 65 ++++++++++++++++++++++++++++----
 plugins/modules/zos_data_set.py  | 20 +++++-----
 2 files changed, 68 insertions(+), 17 deletions(-)

diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index a02f6f6dee..21a5fef9b8 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -222,7 +222,7 @@ def ensure_present(

         if not present:
             try:
-                DataSet.create(**arguments)
+                changed, data_set = DataSet.create(**arguments)
             except DatasetCreateError as e:
                 raise_error = True
                 # data set exists on volume
@@ -237,10 +237,10 @@ def ensure_present(
         if present:
             if not replace:
                 return changed
-            DataSet.replace(**arguments)
+            changed, data_set = DataSet.replace(**arguments)
             if type.upper() == "ZFS":
                 DataSet.format_zfs(name)
-        return True
+        return changed, data_set

     @staticmethod
     def ensure_absent(name, volumes=None, tmphlq=None, noscratch=False):
@@ -1249,7 +1249,8 @@ def replace(
         """
         arguments = locals()
         DataSet.delete(name)
-        DataSet.create(**arguments)
+        changed, data_set = DataSet.create(**arguments)
+        return changed, data_set

     @staticmethod
     def _build_zoau_args(**kwargs):
@@ -1417,7 +1418,7 @@ def create(
                 msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.",
             )
         changed = data_set is not None
-        return changed
+        return changed, data_set

     @staticmethod
     def delete(name, noscratch=False):
@@ -2723,7 +2724,8 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False):
             "tmp_hlq": tmp_hlq,
             "force": force,
         }
-        rc = DataSet.ensure_present(**arguments)
+        rc, data_set = DataSet.ensure_present(**arguments)
+        self.merge_attributes_from_zoau_data_set(data_set)
         self.set_state("present")
         return rc

@@ -2843,6 +2845,39 @@ def set_state(self, new_state):
             raise ValueError(f"State {self.state} not supported for MVSDataset class.")
         return True

+    def merge_attributes_from_zoau_data_set(self, zoau_data_set):
+        self.name = zoau_data_set.name
+        self.organization = zoau_data_set.organization.lower()
+        self.record_format = zoau_data_set.record_format.lower()
+        self.record_length = zoau_data_set.record_length
+        self.volumes = zoau_data_set.volume.lower()
+        self.block_size = zoau_data_set.block_size
+        self.total_space = zoau_data_set.total_space
+        self.used_space = zoau_data_set.used_space
+        self.last_referenced = zoau_data_set.last_referenced
+        self.type = zoau_data_set.type.lower()
+
+    @property
+    def attributes(self):
+        data_set_attributes = {
+                "name": self.name,
+                "state": self.state,
+                "type": self.data_set_type,
+                "space_primary": self.space_primary,
+                "space_secondary": self.space_secondary,
+                "space_type": self.space_type,
+                "record_format": self.record_format,
+                "sms_storage_class": self.sms_storage_class,
+                "sms_data_class": self.sms_data_class,
+                "sms_management_class": self.sms_management_class,
+                "record_length": self.record_length,
+                "block_size": self.block_size,
+                "directory_blocks": self.directory_blocks,
+                "key_offset": self.key_offset,
+                "key_length": self.key_length,
+                "volumes": self.volumes,
+        }
+        return data_set_attributes

 class Member():
     """Represents a member on z/OS.
@@ -2947,8 +2982,7 @@ def __init__(
         self.data_set_type = "gdg"
         self.raw_name = name
         self.gdg = None
-        # Removed escaping since is not needed by the GDG python api.
-        # self.name = DataSet.escape_data_set_name(self.name)
+        self.state = 'present'

     @staticmethod
     def _validate_gdg_name(name):
@@ -2977,6 +3011,7 @@ def create(self):
             fifo=self.fifo,
         )
         self.gdg = gdg
+        self.state = 'present'
         return True

     def ensure_present(self, replace):
@@ -3097,6 +3132,20 @@ def clear(self):
         gdg_view.clear()
         return True

+    @property
+    def attributes(self):
+        data_set_attributes = {
+                "name": self.name,
+                "state": self.state,
+                "type": self.data_set_type,
+                "empty": self.empty,
+                "extended": self.extended,
+                "fifo": self.fifo,
+                "limit": self.limit,
+                "purge": self.purge,
+                "scratch": self.scratch,
+        }
+        return data_set_attributes

 def is_member(data_set):
     """Determine whether the input string specifies a data set member.
diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py
index 53d0ed282b..5f581b680d 100644
--- a/plugins/modules/zos_data_set.py
+++ b/plugins/modules/zos_data_set.py
@@ -826,7 +826,7 @@
 """
 RETURN = r"""
 data_sets:
-  description: The data set names, including temporary generated data set names, in the order provided to the module.
+  description: The affected data set, including temporary generated data set, in the order provided to the module.
   returned: always
   type: list
   elements: str
@@ -1853,13 +1853,13 @@ def parse_and_validate_args(params):
     }
     return parsed_args

-def build_return_schema( data_set_params):
+def build_return_schema(data_set_list):
     """ Builds return values schema with empty values.

     Parameters
     ----------
-    data_set_params : dict
-        Dictionary containing all params used in data set creation.
+    data_set_list : dict
+        List of data sets.

     Returns
     -------
     dict
         Dictionary used to return values at execution finalization.
     """
@@ -1892,10 +1892,9 @@ def build_return_schema( data_set_params):
     }


-    data_sets = [ data_set_schema.copy() | data_set for data_set in data_set_params ]
+    data_sets = [ data_set_schema.copy() | data_set.attributes for data_set in data_set_list ]
     result = {
             "data_sets": data_sets,
-            "changed": False,
             "msg": "",
             "failed": False
     }
     return result
@@ -2077,7 +2076,7 @@ def run_module():
             default=False
         ),
     )
-    result = dict(changed=False, message="", names=[])
+    result = dict(changed=False, message="")

     module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
@@ -2112,6 +2111,8 @@ def run_module():
         if module.params.get("record_format") is not None:
             del module.params["record_format"]

+    data_set_list = []
+
     if not module.check_mode:
         try:
@@ -2120,8 +2121,6 @@ def run_module():
             module_args['state']['dependencies'] = ['batch']
             params = parse_and_validate_args(module.params)
             data_set_param_list = get_individual_data_set_parameters(params)
-            # Build return schema from the data set param list
-            result["names"] = build_return_schema(data_set_param_list)

             for data_set_params in data_set_param_list:
                 # this returns MVSDataSet, Member or GenerationDataGroup
@@ -2134,7 +2133,10 @@ def run_module():
                     force=data_set_params.get("force"),
                     noscratch=data_set_params.get("noscratch"),
                 )
+                data_set_list.append(data_set)
                 result["changed"] = result["changed"] or current_changed
+            # Build return schema from the data set param list
+            result.update(build_return_schema(data_set_list))
         except Exception as e:
             module.fail_json(msg=repr(e), **result)
     module.exit_json(**result)

From 50673d1dbc3cca6eb5da8bea5c981a06c6fb3835 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Tue, 30 Sep 2025 14:40:30 -0600
Subject: [PATCH 05/13] Fixed certain failure cases

---
 plugins/module_utils/data_set.py | 17 ++++++++---------
 plugins/modules/zos_data_set.py  |  2 +-
 2 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index 21a5fef9b8..d162468719 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -204,6 +204,7 @@ def ensure_present(
         arguments.pop("replace", None)
         present = False
         changed = False
+        data_set = None
         if DataSet.data_set_cataloged(name, tmphlq=tmp_hlq):
             present = True
             # Validate volume conflicts when:
@@ -236,7 +237,7 @@ def ensure_present(
                 raise
         if present:
             if not replace:
-                return changed
+                return changed, data_set
             changed, data_set = DataSet.replace(**arguments)
             if type.upper() == "ZFS":
                 DataSet.format_zfs(name)
@@ -2725,7 +2726,8 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False):
             "force": force,
         }
         rc, data_set = DataSet.ensure_present(**arguments)
-        self.merge_attributes_from_zoau_data_set(data_set)
+        if data_set is not None:
+            self.merge_attributes_from_zoau_data_set(data_set)
         self.set_state("present")
         return rc
@@ -2846,16 +2848,13 @@ def set_state(self, new_state):
         return True

     def merge_attributes_from_zoau_data_set(self, zoau_data_set):
+        # print(zoau_data_set)
         self.name = zoau_data_set.name
-        self.organization = zoau_data_set.organization.lower()
-        self.record_format = zoau_data_set.record_format.lower()
+        self.record_format = zoau_data_set.record_format and zoau_data_set.record_format.lower()
         self.record_length = zoau_data_set.record_length
-        self.volumes = zoau_data_set.volume.lower()
+        self.volumes = zoau_data_set.volume and zoau_data_set.volume.lower()
         self.block_size = zoau_data_set.block_size
-        self.total_space = zoau_data_set.total_space
-        self.used_space = zoau_data_set.used_space
-        self.last_referenced = zoau_data_set.last_referenced
-        self.type = zoau_data_set.type.lower()
+        self.type = zoau_data_set.type and zoau_data_set.type.lower()

     @property
     def attributes(self):
diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py
index 5f581b680d..542892d172 100644
--- a/plugins/modules/zos_data_set.py
+++ b/plugins/modules/zos_data_set.py
@@ -2135,7 +2135,7 @@ def run_module():
                 )
                 data_set_list.append(data_set)
                 result["changed"] = result["changed"] or current_changed
-            # Build return schema from the data set param list
+            # Build return schema from created data sets.
             result.update(build_return_schema(data_set_list))
         except Exception as e:
             module.fail_json(msg=repr(e), **result)

From 8962750cfe182948180d9ea7de1c282297f86755 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Tue, 30 Sep 2025 19:29:08 -0600
Subject: [PATCH 06/13] Added attributes return when creating a data set

---
 tests/functional/modules/test_zos_data_set_func.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py
index 2154248845..e0d3cfbdf0 100644
--- a/tests/functional/modules/test_zos_data_set_func.py
+++ b/tests/functional/modules/test_zos_data_set_func.py
@@ -134,9 +134,9 @@ def retrieve_data_set_names(results):
     """ Retrieve system generated data set names """
     data_set_names = []
     for result in results.contacted.values():
-        if len(result.get("names", [])) > 0:
-            for name in result.get("names"):
-                data_set_names.append(name)
+        if len(result.get("data_sets", [])) > 0:
+            for data_set in result.get("data_sets"):
+                data_set_names.append(data_set.get("name"))
     return data_set_names

 def print_results(results):
@@ -974,8 +974,8 @@ def test_data_set_creation_with_tmp_hlq(ansible_zos_module):
         for result in results.contacted.values():
             assert result.get("changed") is True
             assert result.get("module_stderr") is None
-            for dsname in result.get("names"):
-                assert dsname[:7] == tmphlq
+            for ds in result.get("data_sets"):
+                assert ds.get("name")[:7] == tmphlq
     finally:
         if dsname:
             hosts.all.zos_data_set(name=default_data_set_name, state="absent")

From e5e067f095dc0763243c490d9b5d8c4cd651a3ae Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Wed, 1 Oct 2025 11:07:05 -0600
Subject: [PATCH 07/13] Modified dependent files

---
 plugins/modules/zos_archive.py    | 2 +-
 plugins/modules/zos_unarchive.py  | 2 +-
 plugins/modules/zos_zfs_resize.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py
index 158250ad4a..f14c9923b2 100644
--- a/plugins/modules/zos_archive.py
+++ b/plugins/modules/zos_archive.py
@@ -1328,7 +1328,7 @@ def _create_dest_data_set(
         if space_type is None:
             arguments.update(space_type="m")
         arguments.pop("self")
-        changed = data_set.DataSet.ensure_present(**arguments)
+        changed, zoau_data_set = data_set.DataSet.ensure_present(**arguments)
         return arguments["name"], changed

     def create_dest_ds(self, name):
diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py
index b8dc1ac2a8..925b5d8083 100644
--- a/plugins/modules/zos_unarchive.py
+++ b/plugins/modules/zos_unarchive.py
@@ -1033,7 +1033,7 @@ def _create_dest_data_set(
         if space_primary is None:
             arguments.update(space_primary=self._compute_dest_data_set_size())
         arguments.pop("self")
-        changed = data_set.DataSet.ensure_present(**arguments)
+        changed, zoau_data_set = data_set.DataSet.ensure_present(**arguments)
         return arguments["name"], changed

     def _get_include_data_sets_cmd(self):
diff --git a/plugins/modules/zos_zfs_resize.py b/plugins/modules/zos_zfs_resize.py
index d08d4ed63d..a2b412bbf2 100644
--- a/plugins/modules/zos_zfs_resize.py
+++ b/plugins/modules/zos_zfs_resize.py
@@ -427,7 +427,7 @@ def create_trace_dataset(name, member=False):
                                             space_type="K", space_primary="42000", space_secondary="25000")
         rc = data_set.DataSet.ensure_member_present(name)
     else:
-        rc = data_set.DataSet.ensure_present(name=name, replace=False, type="PDS", record_length=200, record_format="VB",
+        rc, zoau_data_set = data_set.DataSet.ensure_present(name=name, replace=False, type="PDS", record_length=200, record_format="VB",
                                             space_type="K", space_primary="42000", space_secondary="25000")
     return rc

From d5caed9d371c24fc51d9ba7709173ce3a945020b Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Wed, 1 Oct 2025 11:53:49 -0600
Subject: [PATCH 08/13] Updated data set member

---
 plugins/module_utils/data_set.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index d162468719..8f0efce920 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -2933,6 +2933,15 @@ def ensure_present(self, replace=None, tmphlq=None):
         rc = DataSet.ensure_member_present(self.name, replace, tmphlq=tmphlq)
         return rc

+    @property
+    def attributes(self):
+        member_attributes = {
+            "name": self.name,
+            "parent_data_set_type": self.parent_data_set_type,
+            "data_set_type": self.data_set_type,
+        }
+        return member_attributes
+
 class GenerationDataGroup():
     """Represents a Generation Data Group base in z/OS.
From 77780373cb573e2752f5181a2cdc09cba48d63b0 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Thu, 2 Oct 2025 09:57:03 -0600
Subject: [PATCH 09/13] Commented test case in zos_backup_restore

---
 .../modules/test_zos_backup_restore.py | 74 +++++++++----------
 1 file changed, 37 insertions(+), 37 deletions(-)

diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py
index 7c43c2688e..eb738bd4db 100644
--- a/tests/functional/modules/test_zos_backup_restore.py
+++ b/tests/functional/modules/test_zos_backup_restore.py
@@ -928,43 +928,43 @@ def test_backup_and_restore_a_data_set_with_same_hlq(ansible_zos_module):
         delete_data_set_or_file(hosts, data_set_backup_location)
         delete_remnants(hosts)

-
-def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module, volumes_on_systems):
-    hosts = ansible_zos_module
-    data_set_name = get_tmp_ds_name()
-    data_set_restore_location = get_tmp_ds_name()
-    hlqs = "TMPHLQ"
-    try:
-        volumes = Volume_Handler(volumes_on_systems)
-        volume_1 = volumes.get_available_vol()
-        volume_2 = volumes.get_available_vol()
-        delete_data_set_or_file(hosts, data_set_name)
-        delete_data_set_or_file(hosts, data_set_restore_location)
-        create_sequential_data_set_with_contents(
-            hosts, data_set_name, DATA_SET_CONTENTS, volume_1
-        )
-        results = hosts.all.zos_backup_restore(
-            operation="backup",
-            data_sets=dict(include=data_set_name),
-            volume=volume_1,
-            backup_name=data_set_restore_location,
-            overwrite=True,
-        )
-        assert_module_did_not_fail(results)
-        assert_data_set_or_file_exists(hosts, data_set_restore_location)
-        results = hosts.all.zos_backup_restore(
-            operation="restore",
-            backup_name=data_set_restore_location,
-            overwrite=True,
-            volume=volume_2,
-            hlq=hlqs,
-        )
-        assert_module_did_not_fail(results)
-        assert_data_set_exists(hosts, data_set_restore_location)
-    finally:
-        delete_data_set_or_file(hosts, data_set_name)
-        delete_data_set_or_file(hosts, data_set_restore_location)
-        delete_remnants(hosts, hlqs)
+# Commented this test because it was commented previously and keeps failing. Tracked on https://github.com/ansible-collections/ibm_zos_core/issues/2348
+# def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module, volumes_on_systems):
+#     hosts = ansible_zos_module
+#     data_set_name = get_tmp_ds_name()
+#     data_set_restore_location = get_tmp_ds_name()
+#     hlqs = "TMPHLQ"
+#     try:
+#         volumes = Volume_Handler(volumes_on_systems)
+#         volume_1 = volumes.get_available_vol()
+#         volume_2 = volumes.get_available_vol()
+#         delete_data_set_or_file(hosts, data_set_name)
+#         delete_data_set_or_file(hosts, data_set_restore_location)
+#         create_sequential_data_set_with_contents(
+#             hosts, data_set_name, DATA_SET_CONTENTS, volume_1
+#         )
+#         results = hosts.all.zos_backup_restore(
+#             operation="backup",
+#             data_sets=dict(include=data_set_name),
+#             volume=volume_1,
+#             backup_name=data_set_restore_location,
+#             overwrite=True,
+#         )
+#         assert_module_did_not_fail(results)
+#         assert_data_set_or_file_exists(hosts, data_set_restore_location)
+#         results = hosts.all.zos_backup_restore(
+#             operation="restore",
+#             backup_name=data_set_restore_location,
+#             overwrite=True,
+#             volume=volume_2,
+#             hlq=hlqs,
+#         )
+#         assert_module_did_not_fail(results)
+#         assert_data_set_exists(hosts, data_set_restore_location)
+#     finally:
+#         delete_data_set_or_file(hosts, data_set_name)
+#         delete_data_set_or_file(hosts, data_set_restore_location)
+#         delete_remnants(hosts, hlqs)


 def test_backup_and_restore_of_sms_group(ansible_zos_module, volumes_sms_systems):

From 2f73fae46f5b3b90353318606ab7e6c373d02429 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Thu, 2 Oct 2025 10:37:18 -0600
Subject: [PATCH 10/13] Fixed sanity issues

---
 plugins/module_utils/data_set.py  | 52 +++++++++++++++--------------
 plugins/modules/zos_data_set.py   | 55 ++++++++++++++++---------------
 plugins/modules/zos_zfs_resize.py |  2 +-
 3 files changed, 56 insertions(+), 53 deletions(-)

diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index 8f0efce920..325d5c6c6b 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -2859,25 +2859,26 @@ def merge_attributes_from_zoau_data_set(self, zoau_data_set):
     @property
     def attributes(self):
         data_set_attributes = {
-                "name": self.name,
-                "state": self.state,
-                "type": self.data_set_type,
-                "space_primary": self.space_primary,
-                "space_secondary": self.space_secondary,
-                "space_type": self.space_type,
-                "record_format": self.record_format,
-                "sms_storage_class": self.sms_storage_class,
-                "sms_data_class": self.sms_data_class,
-                "sms_management_class": self.sms_management_class,
-                "record_length": self.record_length,
-                "block_size": self.block_size,
-                "directory_blocks": self.directory_blocks,
-                "key_offset": self.key_offset,
-                "key_length": self.key_length,
-                "volumes": self.volumes,
+            "name": self.name,
+            "state": self.state,
+            "type": self.data_set_type,
+            "space_primary": self.space_primary,
+            "space_secondary": self.space_secondary,
+            "space_type": self.space_type,
+            "record_format": self.record_format,
+            "sms_storage_class": self.sms_storage_class,
+            "sms_data_class": self.sms_data_class,
+            "sms_management_class": self.sms_management_class,
+            "record_length": self.record_length,
+            "block_size": self.block_size,
+            "directory_blocks": self.directory_blocks,
+            "key_offset": self.key_offset,
+            "key_length": self.key_length,
+            "volumes": self.volumes,
         }
         return data_set_attributes

+
 class Member():
     """Represents a member on z/OS.
@@ -3143,18 +3144,19 @@ def clear(self):
     @property
     def attributes(self):
         data_set_attributes = {
-                "name": self.name,
-                "state": self.state,
-                "type": self.data_set_type,
-                "empty": self.empty,
-                "extended": self.extended,
-                "fifo": self.fifo,
-                "limit": self.limit,
-                "purge": self.purge,
-                "scratch": self.scratch,
+            "name": self.name,
+            "state": self.state,
+            "type": self.data_set_type,
+            "empty": self.empty,
+            "extended": self.extended,
+            "fifo": self.fifo,
+            "limit": self.limit,
+            "purge": self.purge,
+            "scratch": self.scratch,
         }
         return data_set_attributes

+
 def is_member(data_set):
diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py
index 542892d172..575b085912 100644
--- a/plugins/modules/zos_data_set.py
+++ b/plugins/modules/zos_data_set.py
@@ -1853,6 +1853,7 @@ def parse_and_validate_args(params):
     }
     return parsed_args

+
 def build_return_schema(data_set_list):
     """ Builds return values schema with empty values.

@@ -1867,39 +1868,39 @@ def build_return_schema(data_set_list):
         Dictionary used to return values at execution finalization.
     """
     data_set_schema = {
-            "name": "",
-            "state": "",
-            "type": "",
-            "space_primary": "",
-            "space_secondary": "",
-            "space_type": "",
-            "record_format": "",
-            "sms_storage_class": "",
-            "sms_data_class": "",
-            "sms_management_class": "",
-            "record_length": "",
-            "block_size": "",
-            "directory_blocks": "",
-            "key_offset": "",
-            "key_length": "",
-            "empty": "",
-            "extended": "",
-            "fifo": "",
-            "limit": "",
-            "purge": "",
-            "scratch": "",
-            "volumes": [],
+        "name": "",
+        "state": "",
+        "type": "",
+        "space_primary": "",
+        "space_secondary": "",
+        "space_type": "",
+        "record_format": "",
+        "sms_storage_class": "",
+        "sms_data_class": "",
+        "sms_management_class": "",
+        "record_length": "",
+        "block_size": "",
+        "directory_blocks": "",
+        "key_offset": "",
+        "key_length": "",
+        "empty": "",
+        "extended": "",
+        "fifo": "",
+        "limit": "",
+        "purge": "",
+        "scratch": "",
+        "volumes": [],
     }
-
-    data_sets = [ data_set_schema.copy() | data_set.attributes for data_set in data_set_list ]
+    data_sets = [data_set_schema.copy() | data_set.attributes for data_set in data_set_list]
     result = {
-            "data_sets": data_sets,
-            "msg": "",
-            "failed": False
+        "data_sets": data_sets,
+        "msg": "",
+        "failed": False
     }
     return result

+
 def run_module():
     """Runs the module.
diff --git a/plugins/modules/zos_zfs_resize.py b/plugins/modules/zos_zfs_resize.py
index a2b412bbf2..d76f67e98e 100644
--- a/plugins/modules/zos_zfs_resize.py
+++ b/plugins/modules/zos_zfs_resize.py
@@ -428,7 +428,7 @@ def create_trace_dataset(name, member=False):
         rc = data_set.DataSet.ensure_member_present(name)
     else:
         rc, zoau_data_set = data_set.DataSet.ensure_present(name=name, replace=False, type="PDS", record_length=200, record_format="VB",
-                                            space_type="K", space_primary="42000", space_secondary="25000")
+                                                            space_type="K", space_primary="42000", space_secondary="25000")
     return rc

From 28809a392dbb127bc26ae2ab180a368363e866a4 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Thu, 2 Oct 2025 10:46:08 -0600
Subject: [PATCH 11/13] Fixed sanity issues

---
 plugins/modules/zos_data_set.py   | 1 -
 plugins/modules/zos_zfs_resize.py | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py
index 575b085912..47520d5e09 100644
--- a/plugins/modules/zos_data_set.py
+++ b/plugins/modules/zos_data_set.py
@@ -890,7 +890,6 @@
       description:
        - The number of directory blocks to allocate to the data set.
       type: int
-      required: false
       returned: always
     key_offset:
diff --git a/plugins/modules/zos_zfs_resize.py b/plugins/modules/zos_zfs_resize.py
index d76f67e98e..ae11dd7b53 100644
--- a/plugins/modules/zos_zfs_resize.py
+++ b/plugins/modules/zos_zfs_resize.py
@@ -428,7 +428,7 @@ def create_trace_dataset(name, member=False):
         rc = data_set.DataSet.ensure_member_present(name)
     else:
         rc, zoau_data_set = data_set.DataSet.ensure_present(name=name, replace=False, type="PDS", record_length=200, record_format="VB",
-                                                            space_type="K", space_primary="42000", space_secondary="25000")
+                                                     space_type="K", space_primary="42000", space_secondary="25000")
     return rc

From 0c2abb95a96b122560ec16ca6baaf6b68b35ee4d Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Thu, 2 Oct 2025 11:37:09 -0600
Subject: [PATCH 12/13] Updated tests

---
 tests/functional/modules/test_zos_data_set_func.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py
index e0d3cfbdf0..7a1ac0c2f8 100644
--- a/tests/functional/modules/test_zos_data_set_func.py
+++ b/tests/functional/modules/test_zos_data_set_func.py
@@ -232,6 +232,9 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s
     )
     for result in results.contacted.values():
         assert result.get("changed") is False
+        assert len(result.get("data_sets")) > 0
+        assert result.get("data_sets")[0].get("name") is not None
+        assert result.get("data_sets")[0].get("type") is not None
     # uncatalog the data set
     results = hosts.all.zos_data_set(name=dataset, state="uncataloged")
     for result in results.contacted.values():
@@ -1022,6 +1025,13 @@ def test_gdg_create_and_delete(ansible_zos_module, dstype):
         for result in results.contacted.values():
             assert result.get("changed") is True
             assert result.get("module_stderr") is None
+            assert len(result.get("data_sets")) > 0
+            assert result.get("data_sets")[0].get("empty") is not None
+            assert result.get("data_sets")[0].get("extended") is not None
+            assert result.get("data_sets")[0].get("fifo") is not None
+            assert result.get("data_sets")[0].get("limit") is not None
+            assert result.get("data_sets")[0].get("purge") is not None
+            assert result.get("data_sets")[0].get("scratch") is not None
         results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype)
         for result in results.contacted.values():
             assert result.get("changed") is True

From 6b0ba9ff4090200b49090c5eb910761178146615 Mon Sep 17 00:00:00 2001
From: Fernando Flores
Date: Thu, 2 Oct 2025 13:44:24 -0600
Subject: [PATCH 13/13] Update 2206-zos_data_set-interface-update.yml

---
 changelogs/fragments/2206-zos_data_set-interface-update.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/changelogs/fragments/2206-zos_data_set-interface-update.yml b/changelogs/fragments/2206-zos_data_set-interface-update.yml
index c231378687..5b9553e33f 100644
--- a/changelogs/fragments/2206-zos_data_set-interface-update.yml
+++ b/changelogs/fragments/2206-zos_data_set-interface-update.yml
@@ -1,3 +1,3 @@
 minor_changes:
-  - zos_data_set - Adds return value ``data_sets`` which contains the attributes of all data sets created, modified or deleted.
+  - zos_data_set - Adds return value ``data_sets`` which contains the attributes of all data sets created.
     (https://github.com/ansible-collections/ibm_zos_core/pull/2206)
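
Usage note: the series above replaces the flat ``names`` return value of zos_data_set with a ``data_sets`` list of dictionaries. The snippet below is only an illustrative sketch of how a playbook could consume the new return value once these patches are merged; the data set name shown is hypothetical and is not taken from the patches.

# Hypothetical playbook tasks: register the module result and read the new
# ``data_sets`` return value documented in PATCH 01 (instead of ``names``).
- name: Create a sequential data set and inspect the returned attributes
  ibm.ibm_zos_core.zos_data_set:
    name: USER.PRIVATE.TESTDS   # hypothetical data set name
    type: seq
    state: present
  register: ds_result

- name: Show the name and record format reported for the first data set
  ansible.builtin.debug:
    msg: "{{ ds_result.data_sets[0].name }} uses {{ ds_result.data_sets[0].record_format }}"

Playbooks that previously iterated over ``names`` can obtain an equivalent list with ``ds_result.data_sets | map(attribute='name') | list``, which mirrors what the updated functional tests in PATCH 06 and PATCH 12 assert against.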