3 changes: 3 additions & 0 deletions changelogs/fragments/2206-zos_data_set-interface-update.yml
@@ -0,0 +1,3 @@
minor_changes:
- zos_data_set - Adds return value ``data_sets`` which contains the attributes of all data sets created.
(https://github.com/ansible-collections/ibm_zos_core/pull/2206)
77 changes: 68 additions & 9 deletions plugins/module_utils/data_set.py
@@ -204,6 +204,7 @@ def ensure_present(
arguments.pop("replace", None)
present = False
changed = False
data_set = None
if DataSet.data_set_cataloged(name, tmphlq=tmp_hlq):
present = True
# Validate volume conflicts when:
@@ -222,7 +223,7 @@

if not present:
try:
DataSet.create(**arguments)
changed, data_set = DataSet.create(**arguments)
except DatasetCreateError as e:
raise_error = True
# data set exists on volume
@@ -236,11 +237,11 @@
raise
if present:
if not replace:
return changed
DataSet.replace(**arguments)
return changed, data_set
changed, data_set = DataSet.replace(**arguments)
if type.upper() == "ZFS":
DataSet.format_zfs(name)
return True
return changed, data_set
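
With this change, ``DataSet.create``, ``DataSet.replace``, and ``DataSet.ensure_present`` return a ``(changed, data_set)`` tuple instead of a bare boolean, so callers can inspect the attributes of the data set that was actually created. A minimal sketch of the new calling convention; the argument values here are illustrative only:

```python
# Hedged sketch of the updated contract shown in the hunk above;
# argument names follow the diff, values are made up.
changed, zoau_data_set = DataSet.ensure_present(
    name="USER.TEST.PDS",
    type="pds",
    replace=True,
)
if zoau_data_set is not None:
    # The returned ZOAU object exposes the created data set's
    # attributes (name, record_format, ...), which the module
    # can now surface to users.
    print(zoau_data_set.name, zoau_data_set.record_format)
```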

@staticmethod
def ensure_absent(name, volumes=None, tmphlq=None, noscratch=False):
@@ -1249,7 +1250,8 @@ def replace(
"""
arguments = locals()
DataSet.delete(name)
DataSet.create(**arguments)
changed, data_set = DataSet.create(**arguments)
return changed, data_set

@staticmethod
def _build_zoau_args(**kwargs):
@@ -1417,7 +1419,7 @@ def create(
msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.",
)
changed = data_set is not None
return changed
return changed, data_set

@staticmethod
def delete(name, noscratch=False):
@@ -2723,7 +2725,9 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False):
"tmp_hlq": tmp_hlq,
"force": force,
}
rc = DataSet.ensure_present(**arguments)
rc, data_set = DataSet.ensure_present(**arguments)
if data_set is not None:
self.merge_attributes_from_zoau_data_set(data_set)
self.set_state("present")
return rc

@@ -2843,6 +2847,37 @@ def set_state(self, new_state):
raise ValueError(f"State {self.state} not supported for MVSDataset class.")
return True

def merge_attributes_from_zoau_data_set(self, zoau_data_set):
    """Copy the attributes reported by ZOAU onto this data set, lowercasing string values."""
self.name = zoau_data_set.name
self.record_format = zoau_data_set.record_format and zoau_data_set.record_format.lower()
self.record_length = zoau_data_set.record_length
self.volumes = zoau_data_set.volume and zoau_data_set.volume.lower()
self.block_size = zoau_data_set.block_size
self.type = zoau_data_set.type and zoau_data_set.type.lower()

@property
def attributes(self):
data_set_attributes = {
"name": self.name,
"state": self.state,
"type": self.data_set_type,
"space_primary": self.space_primary,
"space_secondary": self.space_secondary,
"space_type": self.space_type,
"record_format": self.record_format,
"sms_storage_class": self.sms_storage_class,
"sms_data_class": self.sms_data_class,
"sms_management_class": self.sms_management_class,
"record_length": self.record_length,
"block_size": self.block_size,
"directory_blocks": self.directory_blocks,
"key_offset": self.key_offset,
"key_length": self.key_length,
"volumes": self.volumes,
}
return data_set_attributes


class Member():
"""Represents a member on z/OS.
@@ -2899,6 +2934,15 @@ def ensure_present(self, replace=None, tmphlq=None):
rc = DataSet.ensure_member_present(self.name, replace, tmphlq=tmphlq)
return rc

@property
def attributes(self):
member_attributes = {
"name": self.name,
"parent_data_set_type": self.parent_data_set_type,
"data_set_type": self.data_set_type,
}
return member_attributes


class GenerationDataGroup():
"""Represents a Generation Data Group base in z/OS.
@@ -2947,8 +2991,7 @@ def __init__(
self.data_set_type = "gdg"
self.raw_name = name
self.gdg = None
# Removed escaping since it is not needed by the GDG Python API.
# self.name = DataSet.escape_data_set_name(self.name)
self.state = 'present'

@staticmethod
def _validate_gdg_name(name):
@@ -2977,6 +3020,7 @@ def create(self):
fifo=self.fifo,
)
self.gdg = gdg
self.state = 'present'
return True

def ensure_present(self, replace):
@@ -3097,6 +3141,21 @@ def clear(self):
gdg_view.clear()
return True

@property
def attributes(self):
data_set_attributes = {
"name": self.name,
"state": self.state,
"type": self.data_set_type,
"empty": self.empty,
"extended": self.extended,
"fifo": self.fifo,
"limit": self.limit,
"purge": self.purge,
"scratch": self.scratch,
}
return data_set_attributes


def is_member(data_set):
"""Determine whether the input string specifies a data set member.
2 changes: 1 addition & 1 deletion plugins/modules/zos_archive.py
@@ -1328,7 +1328,7 @@ def _create_dest_data_set(
if space_type is None:
arguments.update(space_type="m")
arguments.pop("self")
changed = data_set.DataSet.ensure_present(**arguments)
changed, zoau_data_set = data_set.DataSet.ensure_present(**arguments)
return arguments["name"], changed

def create_dest_ds(self, name):
177 changes: 171 additions & 6 deletions plugins/modules/zos_data_set.py
@@ -825,11 +825,127 @@
- "222222"
"""
RETURN = r"""
names:
description: The data set names, including temporarily generated data set names, in the order provided to the module.
data_sets:
description: The affected data sets, including temporarily generated data sets, in the order provided to the module.
returned: always
type: list
elements: dict
contains:
name:
description: The data set name.
type: str
returned: always
state:
description: The final state desired for the specified data set.
type: str
returned: always
type:
description: The data set type.
type: str
returned: always
space_primary:
description: The amount of primary space allocated for the data set.
type: int
returned: always
space_secondary:
description: The amount of secondary space allocated for the data set.
type: int
returned: always
space_type:
description: The unit of measurement used when defining primary and secondary space.
type: str
returned: always
record_format:
description: The record format of the data set.
type: str
sample: fb
returned: always
sms_storage_class:
description:
- The storage class for an SMS-managed data set.
- Returned empty if the data set is not SMS-managed.
type: str
returned: always
sms_data_class:
description:
- The data class for an SMS-managed data set.
- Returned empty if the data set is not SMS-managed.
type: str
returned: always
sms_management_class:
description:
- The management class for an SMS-managed data set.
- Returned empty if the data set is not SMS-managed.
type: str
returned: always
record_length:
description: The length, in bytes, of each record in the data set.
type: int
returned: always
block_size:
description: The block size used for the data set.
type: int
returned: always
directory_blocks:
description:
- The number of directory blocks to allocate to the data set.
type: int
returned: always
key_offset:
description: The key offset used when creating a KSDS data set.
type: int
returned: always
key_length:
description: The key length used when creating a KSDS data set.
type: int
returned: always
empty:
description:
- I(empty) attribute for Generation Data Groups.
- Returned empty if the data set provided was not defined as a GDG.
type: bool
returned: always
extended:
description:
- I(extended) attribute for Generation Data Groups.
- Returned empty if the data set provided was not defined as a GDG.
type: bool
returned: always
fifo:
description:
- I(fifo) attribute for Generation Data Groups.
- Returned empty if the data set provided was not defined as a GDG.
type: bool
returned: always
limit:
description:
- I(limit) attribute for Generation Data Groups.
- Returned empty if the data set provided was not defined as a GDG.
type: int
returned: always
purge:
description:
- I(purge) attribute for Generation Data Groups.
- Returned empty if the data set provided was not defined as a GDG.
type: bool
returned: always
scratch:
description:
- I(scratch) attribute for Generation Data Groups.
- Returned empty if the data set provided was not defined as a GDG.
type: bool
returned: always
volumes:
description:
- Specifies the name of the volume(s) where the data set is located.
- Returned empty if no volume was provided.
type: list
elements: str
returned: always
msg:
description: A string with a generic message relayed to the user.
returned: always
type: str
sample: Error while gathering data set information
"""

from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
@@ -1737,6 +1853,53 @@ def parse_and_validate_args(params):
return parsed_args


def build_return_schema(data_set_list):
""" Builds return values schema with empty values.

Parameters
----------
data_set_list : list
    List of data set objects (MVSDataSet, Member, or GenerationDataGroup).

Returns
-------
dict
Dictionary of return values used when the module finishes execution.
"""
data_set_schema = {
"name": "",
"state": "",
"type": "",
"space_primary": "",
"space_secondary": "",
"space_type": "",
"record_format": "",
"sms_storage_class": "",
"sms_data_class": "",
"sms_management_class": "",
"record_length": "",
"block_size": "",
"directory_blocks": "",
"key_offset": "",
"key_length": "",
"empty": "",
"extended": "",
"fifo": "",
"limit": "",
"purge": "",
"scratch": "",
"volumes": [],
}

data_sets = [data_set_schema.copy() | data_set.attributes for data_set in data_set_list]
result = {
"data_sets": data_sets,
"msg": "",
"failed": False
}
return result
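
``build_return_schema`` relies on the dict union operator (``|``, Python 3.9+): the schema's empty defaults sit on the left and each data set's real attributes override them on the right, so every documented key is always present in the result. A minimal standalone illustration of that merge, with abbreviated keys and made-up values:

```python
# Standalone illustration of the schema merge used above.
schema = {"name": "", "record_format": "", "limit": ""}
attributes = {"name": "USER.TEST.SEQ", "record_format": "fb"}
# Defaults on the left, real attributes override on the right.
merged = schema.copy() | attributes
assert merged == {"name": "USER.TEST.SEQ", "record_format": "fb", "limit": ""}
```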


def run_module():
"""Runs the module.

@@ -1913,7 +2076,7 @@ def run_module():
default=False
),
)
result = dict(changed=False, message="", names=[])
result = dict(changed=False, message="")

module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

@@ -1948,6 +2111,8 @@
if module.params.get("record_format") is not None:
del module.params["record_format"]

data_set_list = []

if not module.check_mode:
try:
# Update the dictionary for use by better arg parser by adding the
@@ -1956,7 +2121,6 @@
module_args['state']['dependencies'] = ['batch']
params = parse_and_validate_args(module.params)
data_set_param_list = get_individual_data_set_parameters(params)
result["names"] = [d.get("name", "") for d in data_set_param_list]

for data_set_params in data_set_param_list:
# this returns MVSDataSet, Member or GenerationDataGroup
@@ -1969,11 +2133,12 @@
force=data_set_params.get("force"),
noscratch=data_set_params.get("noscratch"),
)
data_set_list.append(data_set)
result["changed"] = result["changed"] or current_changed
# Build return schema from created data sets.
result.update(build_return_schema(data_set_list))
except Exception as e:
module.fail_json(msg=repr(e), **result)
if module.params.get("replace"):
result["changed"] = True
module.exit_json(**result)

