From 0a6c24cab6dad61d3ddf71ce5da907e981280feb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Sat, 12 Jul 2025 11:58:01 -0500 Subject: [PATCH 01/73] [Enabler][2148]update_zos_job_query_module (#2204) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Change interface * Remove lines * Update documentation * Move steps out of ret_code * Adjust documentation for steps and module * Replace steps on test suite * Fix documentation * Add fragment * Update changelogs/fragments/2204-Update_zos_job_query_module.yml Co-authored-by: Fernando Flores * Fix null for none --------- Co-authored-by: Fernando Flores Co-authored-by: André Marcel Gutiérrez Benítez --- .../2204-Update_zos_job_query_module.yml | 3 + plugins/module_utils/job.py | 4 +- plugins/modules/zos_job_query.py | 97 +++++--- .../modules/test_zos_job_query_func.py | 222 ++++++++++++++++-- 4 files changed, 272 insertions(+), 54 deletions(-) create mode 100644 changelogs/fragments/2204-Update_zos_job_query_module.yml diff --git a/changelogs/fragments/2204-Update_zos_job_query_module.yml b/changelogs/fragments/2204-Update_zos_job_query_module.yml new file mode 100644 index 0000000000..0e2a79c186 --- /dev/null +++ b/changelogs/fragments/2204-Update_zos_job_query_module.yml @@ -0,0 +1,3 @@ +breaking_changes: + - zos_job_query - Return value ``message`` is deprecated in favor of ``msg``. Return value ``steps`` is no longer under ``ret_code`` but is now included under ``jobs``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2204). 
\ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 53b5a370e1..1b0351d682 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -381,7 +381,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["queue_position"] = entry.queue_position job["program_name"] = entry.program_name job["class"] = "" - job["ret_code"]["steps"] = [] + job["steps"] = [] job["ddnames"] = [] job["duration"] = duration if hasattr(entry, "execution_time"): @@ -493,7 +493,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T dd["content"] = tmpcont.split("\n") - job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) + job["steps"].extend(_parse_steps(tmpcont)) job["ddnames"].append(dd) if len(job["class"]) < 1: diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index a9d237dfc0..398768c3bf 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -108,6 +108,7 @@ True if the state was changed, otherwise False. returned: always type: bool + sample: True jobs: description: The output information for a list of jobs matching specified criteria. @@ -153,21 +154,21 @@ The job entry subsystem that MVS uses to do work. type: str sample: STL1 - cpu_time: + origin_node: description: - Sum of the CPU time used by each job step, in microseconds. - type: int - sample: 5 + Origin node that submitted the job. + type: str + sample: "STL1" execution_node: description: Execution node that picked the job and executed it. type: str sample: "STL1" - origin_node: + cpu_time: description: - Origin node that submitted the job. - type: str - sample: "STL1" + Sum of the CPU time used by each job step, in microseconds. + type: int + sample: 5 ret_code: description: Return code output collected from job log. @@ -210,19 +211,35 @@ The CC returned for this step in the DD section. 
type: int sample: 0 - sample: ret_code: { "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "code": 0, - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - } - ] + "code": 0 } + steps: + description: + Series of JCL steps that were executed and their return codes. + type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + sample: "STEP0001" + step_cc: + description: + The CC returned for this step in the DD section. + type: int + sample: 0 + sample: + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ] job_class: description: Job class for this job. @@ -277,14 +294,20 @@ "job_name": "LINKJOB", "owner": "ADMIN", "job_id": "JOB01427", - "ret_code": "null", - "job_class": "K", "content_type": "JOB", - "svc_class": "?", + "ret_code": { "msg" : "CC", "msg_code" : "0000", "code" : "0", msg_txt : "CC" }, + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ], + "job_class": "STC", + "svc_class": "null", "priority": 1, "asid": 0, "creation_date": "2023-05-03", "creation_time": "12:13:00", + "program_name": "BPXBATCH", "queue_position": 3, "execution_time": "00:00:02", "system": "STL1", @@ -298,7 +321,8 @@ "owner": "ADMIN", "job_id": "JOB16577", "content_type": "JOB", - "ret_code": { "msg": "CANCELED", "code": "null" }, + "ret_code": { "msg" : "CANCELED", "msg_code" : "null", "code" : "null", msg_txt : "CANCELED" }, + "steps" : [], "job_class": "A", "svc_class": "E", "priority": 0, @@ -307,6 +331,7 @@ "creation_time": "12:14:00", "queue_position": 0, "execution_time": "00:00:03", + "program_name": "null", "system": "STL1", "subsystem": "STL1", "cpu_time": 1414, @@ -314,7 +339,7 @@ "origin_node": "STL1" }, ] -message: +msg: description: Message returned on failure. 
type: str @@ -349,7 +374,7 @@ def run_module(): job_id=dict(type="str", required=False), ) - result = dict(changed=False, message="") + result = dict(changed=False) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) @@ -379,6 +404,7 @@ def run_module(): jobs_raw = query_jobs(name, id, owner) if jobs_raw: jobs = parsing_jobs(jobs_raw) + result["changed"] = True else: jobs = None @@ -436,8 +462,8 @@ def parsing_jobs(jobs_raw): Parsed jobs. """ jobs = [] - ret_code = {} for job in jobs_raw: + ret_code = job.get("ret_code") # Easier to see than checking for an empty string, JOB NOT FOUND was # replaced with None in the jobs.py and msg_txt field describes the job query instead if job.get("ret_code") is None: @@ -449,30 +475,32 @@ def parsing_jobs(jobs_raw): if "AC" in status_raw: # the job is active - ret_code = None + ret_code["msg"] = None + ret_code["msg_code"] = None + ret_code["code"] = None + ret_code["msg_txt"] = None + elif "CC" in status_raw: # status = 'Completed normally' - ret_code = { - "msg": status_raw, - "code": job.get("ret_code").get("code"), - } + ret_code["msg"] = status_raw + elif "ABEND" in status_raw: # status = 'Ended abnormally' - ret_code = { - "msg": status_raw, - "code": job.get("ret_code").get("code"), - } + ret_code["msg"] = status_raw + elif "ABENDU" in status_raw: # status = 'Ended abnormally' - ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} + ret_code["msg"] = status_raw elif "CANCELED" in status_raw or "JCLERR" in status_raw or "JCL ERROR" in status_raw or "JOB NOT FOUND" in status_raw: # status = status_raw - ret_code = {"msg": status_raw, "code": None} + ret_code["msg"] = status_raw + ret_code["code"] = None + ret_code["msg_code"] = None else: # status = 'Unknown' - ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} + ret_code["msg"] = status_raw job_dict = { "job_name": job.get("job_name"), @@ -485,6 +513,7 @@ def parsing_jobs(jobs_raw): "execution_node": 
job.get("execution_node"), "origin_node": job.get("origin_node"), "ret_code": ret_code, + "steps": job.get("steps"), "job_class": job.get("job_class"), "svc_class": job.get("svc_class"), "priority": job.get("priority"), diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 6f920cfea4..929200a748 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -49,8 +49,36 @@ def test_zos_job_query_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="*", owner="*") for result in results.contacted.values(): - assert result.get("changed") is False + assert result.get("changed") is True assert result.get("jobs") is not None + assert result.get("msg", False) is False + + job = result.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("execution_time") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("code") is not None + assert rc.get("msg_code") is not None + assert rc.get("msg_txt") is not None + JCLQ_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM @@ -86,21 +114,73 @@ def 
test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): src=f"{jdata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("execution_time") is not None + # Default validation + assert result.get("changed") is True + assert result.get("jobs") is not None + assert result.get("msg", False) is False + + job = result.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None - fulljobid = result.get("jobs")[0].get("job_id") + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" + + fulljobid = job.get("job_id") jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' qresults = hosts.all.zos_job_query(job_id=jobmask) for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None - assert qresult.get("jobs")[0].get("execution_time") is not None - assert qresult.get("jobs")[0].get("system") is not None - assert 
qresult.get("jobs")[0].get("subsystem") is not None - assert "cpu_time" in result.get("jobs")[0] - assert "execution_node" in result.get("jobs")[0] - assert "origin_node" in result.get("jobs")[0] + assert qresult.get("msg", False) is False + + job = qresult.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") == fulljobid + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("execution_time") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -127,18 +207,71 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): src=f"{ndata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("execution_time") is not None - assert result.get("jobs")[0].get("system") is not None - assert result.get("jobs")[0].get("subsystem") is not None + assert result.get("changed") is True + assert result.get("jobs") is not None + assert result.get("msg", False) is False + + job = result.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is 
not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" jobname = "HE*L*" qresults = hosts.all.zos_job_query(job_name=jobname, owner="*") for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None - assert qresult.get("jobs")[0].get("execution_time") is not None + assert qresult.get("msg", False) is False + + job = qresult.get("jobs")[0] + assert job.get("job_name") == "HELLO" + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("execution_time") is not None + assert job.get("svc_class") is None + assert 
job.get("steps") is not None + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=ndata_set_name, state="absent") @@ -150,7 +283,34 @@ def test_zos_job_id_query_short_ids_func(ansible_zos_module): job_id = get_job_id(hosts, len_id) qresults = hosts.all.zos_job_query(job_id=job_id) for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None + assert qresult.get("msg", False) is False + + job = qresult.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): @@ -167,5 +327,31 @@ def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): content_type = "JOB" for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None - assert qresult.get("jobs")[0].get("content_type") == content_type + assert qresult.get("msg", False) is False + + job = 
qresult.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") == content_type + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" From 51349973f2266097c2acd3b16c0448bd17d7eb5e Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Sat, 2 Aug 2025 23:51:27 +0530 Subject: [PATCH 02/73] Adding new module --- plugins/module_utils/better_arg_parser.py | 66 +++ plugins/modules/zos_started_task.py | 430 ++++++++++++++++++ .../modules/test_zos_started_task_func.py | 193 ++++++++ 3 files changed, 689 insertions(+) create mode 100644 plugins/modules/zos_started_task.py create mode 100644 tests/functional/modules/test_zos_started_task_func.py diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index e5dd8e975c..f61abd4652 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -160,6 +160,8 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): "data_set": self._data_set_type, "data_set_base": self._data_set_base_type, "data_set_member": self._data_set_member_type, + "member_name": self._member_name_type, + "identifier_name": self._identifier_name_type, 
"qualifier": self._qualifier_type, "qualifier_or_empty": self._qualifier_or_empty_type, "qualifier_pattern": self._qualifier_pattern_type, @@ -328,6 +330,70 @@ def _bool_type(self, contents, resolve_dependencies): if not isinstance(contents, bool): raise ValueError('Invalid argument "{0}" for type "bool".'.format(contents)) return contents + + def _member_name_type(self, contents, resolve_dependencies): + """Resolver for data_set type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + if not fullmatch( + r"^[A-Z$#@]{1}[A-Z0-9$#@]{0,7}$", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument "{0}" for type "data_set".'.format(contents) + ) + return str(contents) + + def _identifier_name_type(self, contents, resolve_dependencies): + """Resolver for data_set type arguments. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + if not fullmatch( + r"^[A-Z]{1}[A-Z0-9$#@]{0,7}$", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument "{0}" for type "data_set".'.format(contents) + ) + return str(contents) def _path_type(self, contents, resolve_dependencies): """Resolver for path type arguments. 
diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py new file mode 100644 index 0000000000..681e0107ff --- /dev/null +++ b/plugins/modules/zos_started_task.py @@ -0,0 +1,430 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2022, 2025 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import (absolute_import, division, print_function) +import traceback + +__metaclass__ = type + +DOCUMENTATION = r""" +module: zos_started_task +version_added: 1.16.0 +author: + - "Ravella Surendra Babu (@surendra.ravella582)" +short_description: Perform operations on started tasks. +description: + - Start, display, modify, cancel, force and stop a started task + +options: + asid: + description: + - I(asid) is a unique address space identifier which gets assigned to each running task. + required: false + type: str + device_type: + description: + - I(device_type) is the type of the output device (if any) associated with the task. + required: false + type: str + device_number: + description: + - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. + A slash (/) must precede a 4-digit number but is not before a 3-digit number. + required: false + type: str + identifier: + description: + - I(device_number) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. 
+ required: false + type: str + job_account: + description: + - I(job_account) specifies accounting data in the JCL JOB statement for the started task. + If the source JCL was a job and has already accounting data, the value that is specified on this parameter + overrides the accounting data in the source JCL. + required: false + type: str + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + keyword_parameters: + description: + - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. + The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than + 44 characters in length. + required: false + type: str + member_name: + description: + - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL + for the task to be started. The member can be either a job or a cataloged procedure. + required: false + type: str + operation: + description: + - The started task operation which needs to be performed. + - > + If I(operation=start) and the data set does not exist on the managed node, + no action taken, module completes successfully with I(changed=False). + required: false + type: str + choices: + - start + - stop + - modify + - display + - force + - cancel + parameters: + description: + - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + required: false + type: str + reus_asid: + description: + - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, + a reusable ASID is assigned to the address space created by the START command. 
If REUSASID=YES is not specified + on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + subsystem_name: + description: + - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, + which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + required: false + type: str + volume_serial: + description: + - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. + required: false + type: str +""" +EXAMPLES = r""" +- name: Start a started task using member name. + zos_started_task: + member: "PROCAPP" + operation: "start" +""" +RETURN = r""" + +""" + +from ansible.module_utils.basic import AnsibleModule + +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + zoau_version_checker +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError +) + +try: + from zoautil_py import opercmd +except Exception: + datasets = ZOAUImportError(traceback.format_exc()) + gdgs = ZOAUImportError(traceback.format_exc()) + +try: + from zoautil_py import exceptions as zoau_exceptions +except ImportError: + zoau_exceptions = ZOAUImportError(traceback.format_exc()) + +def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): + """Execute operator command. + + Parameters + ---------- + operator_cmd : str + Operator command. + timeout_s : int + Timeout to wait for the command execution, measured in centiseconds. + *args : dict + Arguments for the command. + **kwargs : dict + More arguments for the command. + + Returns + ------- + OperatorQueryResult + The result of the command. 
+ """ + # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: + timeout_c = 100 * timeout_s + response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) + + rc = response.rc + stdout = response.stdout_response + stderr = response.stderr_response + return rc, stdout, stderr + +def prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters): + cmd = 'S '+member + if identifier: + cmd = cmd + "." + identifier + "," + device + "," + volume_serial + "," + parameters + if jobname: + cmd = cmd + ",jobname=" + job_name + if jobaccount: + cmd = cmd + ",jobacct=" + job_account + if subsystem_name: + cmd = cmd + ",SUB=" + subsystem_name + if reus_asid: + cmd = cmd + ",REUSASID=" + reus_asid + if keyword_parameters: + cmd = cmd + "," + keyword_parameters + return cmd + +def run_module(): + """Initialize the module. + + Raises + ------ + fail_json + z/OS started task operation failed. + """ + module = AnsibleModule( + argument_spec={ + 'operation': { + 'type': 'str', + 'required': True, + 'choices': ['start', 'stop', 'modify', 'display', 'force', 'cancel'] + }, + 'member_name': { + 'type': 'str', + 'required': False, + 'aliases': ['member'] + }, + 'identifier': { + 'arg_type': 'str', + 'required': False + }, + 'job_name': { + 'type': 'str', + 'required': False, + 'aliases': ['task_name'] + }, + 'job_account': { #55 chars + 'type': 'str', + 'required': False + }, + 'device_type': { + 'type': 'str', + 'required': False + }, + 'device_number': { #A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. 
+ 'type': 'str', + 'required': False + }, + 'volume_serial': { + 'type': 'str', + 'required': False + }, + 'subsystem_name': { #The name must be 1 - 4 characters + 'type': 'str', + 'required': False + }, + 'reus_asid': { + 'type': 'bool', + 'required': False, + 'choices': ['yes', 'no'] + }, + 'parameters': { + 'type': 'str', + 'required': False + }, + 'keyword_parameters': { #The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. + 'type': 'str', + 'required': False + }, + 'asid': { + 'type': 'str', + 'required': False + } + }, + mutually_exclusive=[ + ['job_name', 'identifier'], + ['device_name', 'device_type'] + ], + supports_check_mode=True + ) + + args_def = { + 'operation': { + 'type': 'str', + 'required': True + }, + 'member_name': { + 'arg_type': 'member_name', + 'required': False, + 'aliases': ['member'] + }, + 'identifier_name': { + 'arg_type': 'identifier_name', + 'required': False + }, + 'job_name': { + 'arg_type': 'str', + 'required': False, + 'aliases': ['job'] + }, + 'job_account': { + 'arg_type': 'str', + 'required': False + }, + 'device_type': { + 'arg_type': 'str', + 'required': False + }, + 'device_number': { + 'arg_type': 'str', + 'required': False + }, + 'volume_serial': { + 'arg_type': 'str', + 'required': False + }, + 'subsystem_name': { + 'arg_type': 'str', + 'required': False + }, + 'reus_asid': { + 'arg_type': 'str', + 'required': False + }, + 'parameters': { + 'arg_type': 'str', + 'required': False + }, + 'keyword_parameters': { + 'arg_type': 'str', + 'required': False + }, + 'asid': { + 'arg_type': 'str', + 'required': False + } + } + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + operation = module.params.get('operation') + member = 
module.params.get('member_name') + identifier = module.params.get('identifier') + job_name = module.params.get('job_name') + job_account = module.params.get('job_account') + asid = module.params.get('asid') + parameters = module.params.get('parameters') + device_type = module.params.get('device_type') + device_number = module.params.get('device_number') + volume_serial = module.params.get('volume_serial') + subsystem_name = module.params.get('subsystem_name') + reus_asid = module.params.get('reus_asid') + keyword_parameters = module.params.get('keyword_parameters') + device = device_type if device_type is not None else device_number + kwargs = {} + + wait_s = 5 + + use_wait_arg = False + if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): + use_wait_arg = True + + if use_wait_arg: + kwargs.update({"wait": True}) + + args = [] + cmd = '' + started_task_name = "" + if operation != 'start': + if job_name is not None: + started_task_name = job_name + elif member is not None: + started_task_name = member + if identifier is not None: + started_task_name = started_task_name + "." 
+ identifier + else: + module.fail_json( + msg="either member_name or identifier is needed but both are missing.", + changed=False + ) + if operation == 'start': + ##member name is mandatory + if member is None or member.strip() == "": + module.fail_json( + msg="member_name is missing which is mandatory.", + changed=False + ) + cmd = prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters) + elif operation == 'display': + cmd = 'd a,'+started_task_name + elif operation == 'stop': + cmd = 'p '+started_task_name + elif operation == 'cancel': + cmd = 'c '+started_task_name + if asid: + cmd = cmd+',a='+asid + elif operation == 'force': + cmd = 'force '+started_task_name + if asid: + cmd = cmd+',a='+asid + elif operation == 'modify': + cmd = 'f '+started_task_name+','+parameters + changed = False + stdout = "" + stderr = "" + rc, out, err = execute_command(cmd, timeout_s=wait_s, *args, **kwargs) + if "ERROR" in out or err != "": + changed = False + stdout = out + stderr = err + if err == "" or err is None: + stderr = out + else: + changed = True + stdout = out + stderr = err + + + result = dict() + + if module.check_mode: + module.exit_json(**result) + + result = dict( + changed=changed, + cmd=cmd, + remote_cmd=cmd, + rc=rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) + + module.exit_json(**result) + + +if __name__ == '__main__': + run_module() diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py new file mode 100644 index 0000000000..04ae70c678 --- /dev/null +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -0,0 +1,193 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2025 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name +from shellescape import quote +import re + +TMP_DIRECTORY = "/tmp/" +PROC_PDS = "USER.PRIVATE.PROCLIB" +TASK_JCL_CONTENT="""//STEP1 EXEC PGM=BPXBATCH +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//STDPARM DD * +SH sleep 600 +/*""" + +def test_start_and_cancel_zos_started_task(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) + ) + + hosts.all.zos_data_set( + name=data_set_name, state="present", type="pds", replace=True + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(temp_path, data_set_name) + ) + + copy_result = hosts.all.zos_copy( + src="{0}(SAMPLE)".format(data_set_name), + dest=PROC_PDS, + remote_src=True, + force=True + ) + + for cp_res in copy_result.contacted.values(): + print(cp_res) + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == PROC_PDS + + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLE" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + 
stop_results = hosts.all.zos_started_task( + operation="cancel", + started_task_name="SAMPLE" + ) + + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLE" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + display_result = hosts.all.zos_started_task( + operation="display", + started_task_name="SAMPLE" + ) + for result in display_result.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + display_output = list(display_result.contacted.values())[0].get("stdout") + asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + + stop_results = hosts.all.zos_started_task( + operation="cancel", + started_task_name="SAMPLE", + asid=asid_val + ) + + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.zos_data_set( + name=f"{PROC_PDS}(SAMPLE)", + state="absent", + type="member", + force=True + ) + +def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) + ) + + hosts.all.zos_data_set( + name=data_set_name, state="present", type="pds", replace=True + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE 
\"//'{1}(SAMPLE)'\"".format(temp_path, data_set_name) + ) + + copy_result = hosts.all.zos_copy( + src="{0}(SAMPLE)".format(data_set_name), + dest=PROC_PDS, + remote_src=True, + force=True + ) + + for cp_res in copy_result.contacted.values(): + print(cp_res) + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") == PROC_PDS + + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLE", + job_name="TESTTSK" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + stop_results = hosts.all.zos_started_task( + operation="cancel", + started_task_name="TESTTSK" + ) + + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.zos_data_set( + name=f"{PROC_PDS}(SAMPLE)", + state="absent", + type="member", + force=True + ) \ No newline at end of file From 3e7ec314cc26b52a64f4ce4ae2b0983ad59e6fb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 5 Aug 2025 11:05:41 -0500 Subject: [PATCH 03/73] [Enabler][2147-2149]update_job_modules_interfaces (#2208) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Test * Modify action job submit * Replace structure for empty jobs * Modify structure * Modify test to properly test new interface * Modify test on base of job submit new interface * Be alignt * Modify job submit * Add fragment * Better fragment * Update documentation * Fix commit * Fix documentation * Keep documentation * Replace job submit * Fix sanity * Fix sanity * Return values * Return values to 
module * Return values to test suite and empty response * Update changelogs/fragments/2208_update_job_modules_interfaces.yml Co-authored-by: Fernando Flores * Update changelogs/fragments/2208_update_job_modules_interfaces.yml Co-authored-by: Fernando Flores * Update changelogs/fragments/2208_update_job_modules_interfaces.yml Co-authored-by: Fernando Flores * Fix job submit * Validate test case * Validate test case * Validate test case * Validate test case * Validate test case * Validate test case * Validate test case * Update changelogs/fragments/2204-Update_zos_job_query_module.yml Co-authored-by: Alex Moreno * Update plugins/modules/zos_job_submit.py Co-authored-by: Fernando Flores * Update plugins/modules/zos_job_submit.py Co-authored-by: Fernando Flores * Update plugins/modules/zos_job_submit.py Co-authored-by: Alex Moreno * Update plugins/modules/zos_job_submit.py Co-authored-by: Alex Moreno * Update changelogs/fragments/2208_update_job_modules_interfaces.yml Co-authored-by: Alex Moreno * Update changelogs/fragments/2208_update_job_modules_interfaces.yml Co-authored-by: Alex Moreno * Update changelogs/fragments/2208_update_job_modules_interfaces.yml Co-authored-by: Alex Moreno --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores Co-authored-by: Alex Moreno --- .../2204-Update_zos_job_query_module.yml | 2 +- .../2208_update_job_modules_interfaces.yml | 12 + plugins/action/zos_job_submit.py | 8 +- plugins/module_utils/job.py | 45 +- plugins/modules/zos_job_output.py | 134 ++- plugins/modules/zos_job_submit.py | 385 ++++--- .../modules/test_zos_job_output_func.py | 247 +++- .../modules/test_zos_job_query_func.py | 63 +- .../modules/test_zos_job_submit_func.py | 1015 ++++++++++++++--- 9 files changed, 1455 insertions(+), 456 deletions(-) create mode 100644 changelogs/fragments/2208_update_job_modules_interfaces.yml diff --git a/changelogs/fragments/2204-Update_zos_job_query_module.yml 
b/changelogs/fragments/2204-Update_zos_job_query_module.yml index 0e2a79c186..b3a2270c37 100644 --- a/changelogs/fragments/2204-Update_zos_job_query_module.yml +++ b/changelogs/fragments/2204-Update_zos_job_query_module.yml @@ -1,3 +1,3 @@ breaking_changes: - - zos_job_query - Return value ``message`` is deprecated in favor of ``msg``. Return value ``steps`` are no longer under `` but is now included under jobs. + - zos_job_query - Return field ``message`` is deprecated in favor of ``msg``. Return field ``steps`` is no longer under ``ret_code`` but is now included under ``jobs``. (https://github.com/ansible-collections/ibm_zos_core/pull/2204). \ No newline at end of file diff --git a/changelogs/fragments/2208_update_job_modules_interfaces.yml b/changelogs/fragments/2208_update_job_modules_interfaces.yml new file mode 100644 index 0000000000..e84fc8d9cf --- /dev/null +++ b/changelogs/fragments/2208_update_job_modules_interfaces.yml @@ -0,0 +1,12 @@ +breaking_changes: + - zos_job_output - Option name ``ddname`` is substituted for ``dd_name``, but ``ddname`` is kept as an alias. Return value ``changed`` is always returned and return value ``steps`` are no longer under ``ret_code`` + but is now included under `jobs`. Return value ``ddnames`` is replaced by ``dds`` and value ``ddname`` under ``ddnames`` is replaced by ``dd_name``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2208). + - zos_job_submit - Interface value ``location`` is replace for ``remote_src`` in a bool value and ``wait_time_s`` is replaced for ``wait_time``. + Return value ``ddnames`` replaced by ``dds`` and value ``ddname`` under ``ddnames`` is replace by ``dd_name``. Now all values are returned under ``jobs`` except ``changed``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2208). + +trivial: + - test/zos_job_query_func.py - Update test validation to new values for job submit. Return value ``steps`` are no longer under `ret_code` + but is now included under `jobs`. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2208). \ No newline at end of file diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 1f8cdf465a..5456fb3ec7 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -45,16 +45,16 @@ def run(self, tmp=None, task_vars=None): module_args = self._task.args.copy() use_template = _process_boolean(module_args.get("use_template")) - location = module_args.get("location") - if use_template and location != "local": + remote_src = module_args.get("remote_src") + if use_template and remote_src: result.update(dict( failed=True, changed=False, - msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'local'".format(location) + msg="Use of Jinja2 templates is only valid for local files. remote_src is set to '{0}' but should be False".format(remote_src) )) return result - if location == "local": + if not remote_src: source = self._task.args.get("src", None) diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 1b0351d682..d3f2fce065 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -167,6 +167,7 @@ def _job_not_found(job_id, owner, job_name, dd_name): job_not_found_msg = "with the name {0}".format(job_name.upper()) job = {} + job["job_not_found"] = True job["job_id"] = job_id job["job_name"] = job_name job["subsystem"] = None @@ -175,24 +176,36 @@ def _job_not_found(job_id, owner, job_name, dd_name): job["cpu_time"] = None job["execution_node"] = None job["origin_node"] = None + job["content_type"] = None + job["creation_date"] = None + job["creation_time"] = None + job["execution_time"] = None + job["job_class"] = None + job["svc_class"] = None + job["priority"] = None + job["asid"] = None + job["queue_position"] = None + job["program_name"] = None job["ret_code"] = {} job["ret_code"]["msg"] = None job["ret_code"]["code"] = None job["ret_code"]["msg_code"] = 
None job["ret_code"]["msg_txt"] = "The job {0} could not be found.".format(job_not_found_msg) + job["steps"] = [] - job["class"] = "" + job["class"] = None - job["ddnames"] = [] + job["dds"] = [] dd = {} - dd["ddname"] = dd_name - dd["record_count"] = "0" - dd["id"] = "" + dd["dd_name"] = dd_name + dd["record_count"] = 0 + dd["id"] = None dd["stepname"] = None - dd["procstep"] = "" - dd["byte_count"] = "0" - job["ddnames"].append(dd) + dd["procstep"] = None + dd["byte_count"] = 0 + dd["content"] = None + job["dds"].append(dd) jobs.append(job) @@ -351,8 +364,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job = {} job["job_id"] = entry.job_id job["job_name"] = entry.name - job["subsystem"] = "" - job["system"] = "" + job["subsystem"] = None + job["system"] = None job["owner"] = entry.owner job["cpu_time"] = None job["execution_node"] = None @@ -380,9 +393,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position job["program_name"] = entry.program_name - job["class"] = "" + job["class"] = None job["steps"] = [] - job["ddnames"] = [] + job["dds"] = [] job["duration"] = duration if hasattr(entry, "execution_time"): job["execution_time"] = entry.execution_time @@ -444,7 +457,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T if dd_name not in single_dd["dd_name"]: continue else: - dd["ddname"] = single_dd["dd_name"] + dd["dd_name"] = single_dd["dd_name"] if "records" in single_dd: dd["record_count"] = single_dd["records"] @@ -495,8 +508,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T job["steps"].extend(_parse_steps(tmpcont)) - job["ddnames"].append(dd) - if len(job["class"]) < 1: + job["dds"].append(dd) + if job["class"] is None: job["class"] = entry.job_class if job["system"] is None: @@ -545,7 +558,7 @@ def 
_ddname_pattern(contents, resolve_dependencies): re.IGNORECASE, ): raise ValueError( - 'Invalid argument type for "{0}". Expected "ddname_pattern"'.format( + 'Invalid argument type for "{0}". Expected "dd_name_pattern"'.format( contents ) ) diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 19be248566..70451c7cae 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -31,7 +31,7 @@ such as "TCP*" or "*". - The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". - - If there is no ddname, or if ddname="?", output of all the ddnames under + - If there is no dd_name, or if dd_name="?", output of all the dds under the given job will be displayed. version_added: "1.0.0" author: @@ -55,12 +55,13 @@ - The owner who ran the job. (e.g "IBMUSER", "*") type: str required: false - ddname: + dd_name: description: - Data definition name (show only this DD on a found job). (e.g "JESJCL", "?") type: str required: false + aliases: [ ddname ] attributes: action: @@ -75,21 +76,21 @@ """ EXAMPLES = r""" -- name: Job output with ddname +- name: Job output with dd_name zos_job_output: job_id: "STC02560" - ddname: "JESMSGLG" + dd_name: "JESMSGLG" -- name: JES Job output without ddname +- name: JES Job output without dd_name zos_job_output: job_id: "STC02560" -- name: JES Job output with all ddnames +- name: JES Job output with all dd_name zos_job_output: job_id: "STC*" job_name: "*" owner: "IBMUSER" - ddname: "?" + dd_name: "?" """ RETURN = r""" @@ -139,7 +140,7 @@ sample: "STL1" class: description: - Identifies the data set used in a system output data set, usually called a sysout data set. + Identifies the data set used in a system output data set, usually called a sysout data set. type: str sample: content_type: @@ -169,15 +170,15 @@ it represents the time elapsed from the job execution start and current time. 
type: str sample: 00:00:10 - ddnames: + dds: description: - Data definition names. + Data definition names. type: list elements: dict contains: - ddname: + dd_name: description: - Data definition name. + Data definition name. type: str sample: JESMSGLG record_count: @@ -187,7 +188,7 @@ sample: 17 id: description: - The file ID. + The file ID. type: str sample: 2 stepname: @@ -198,8 +199,8 @@ sample: JES2 procstep: description: - Identifies the set of statements inside JCL grouped together to - perform a particular function. + Identifies the set of statements inside JCL grouped together to + perform a particular function. type: str sample: PROC1 byte_count: @@ -209,7 +210,7 @@ sample: 574 content: description: - The ddname content. + The dd content. type: list elements: str sample: @@ -227,7 +228,7 @@ " 5 //SYSUT1 DD * ", " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " - ] + ] job_class: description: Job class for this job. @@ -261,7 +262,7 @@ sample: "IEBGENER" ret_code: description: - Return code output collected from job log. + Return code output collected from job log. type: dict contains: msg: @@ -277,48 +278,48 @@ sample: S0C4 msg_txt: description: - Returns additional information related to the job. + Returns additional information related to the job. type: str sample: "No job can be located with this job name: HELLO" code: description: - Return code converted to integer value (when possible). + Return code converted to integer value (when possible). type: int sample: 00 - steps: - description: - Series of JCL steps that were executed and their return codes. - type: list - elements: dict - contains: - step_name: - description: - Name of the step shown as "was executed" in the DD section. - type: str - sample: "STEP0001" - step_cc: - description: - The CC returned for this step in the DD section. 
- type: int - sample: 0 sample: ret_code: { "code": 0, "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - } - ] } + steps: + description: + Series of JCL steps that were executed and their return codes. + type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + sample: "STEP0001" + step_cc: + description: + The CC returned for this step in the DD section. + type: int + sample: 0 + sample: [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ] sample: [ { "class": "R", "content_type": "JOB", - "ddnames": [ + "dds": [ { "byte_count": "775", "content": [ @@ -340,7 +341,7 @@ "- 6 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "17", @@ -364,7 +365,7 @@ " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "14", @@ -393,7 +394,7 @@ " IEF033I JOB/HELLO /STOP 2020049.1025 ", " CPU: 0 HR 00 MIN 00.00 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "19", @@ -407,7 +408,7 @@ " ", " PROCESSING ENDED AT EOD " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "", "record_count": "4", @@ -418,7 +419,7 @@ "content": [ " HELLO, WORLD " ], - "ddname": "SYSUT2", + "dd_name": "SYSUT2", "id": "103", "procstep": "", "record_count": "1", @@ -439,12 +440,12 @@ "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - } - ] }, + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ], "system": "STL1", "subsystem": "STL1", "cpu_time": 1414, @@ -456,7 +457,7 @@ description: Indicates if any changes were made during module operation type: bool - returned: on success + returned: always """ @@ -495,7 +496,7 @@ 
def run_module(): job_id=dict(type="str", required=False), job_name=dict(type="str", required=False), owner=dict(type="str", required=False), - ddname=dict(type="str", required=False), + dd_name=dict(type="str", required=False, aliases=['ddname']), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) @@ -504,7 +505,7 @@ def run_module(): job_id=dict(type="job_identifier", required=False), job_name=dict(type="job_identifier", required=False), owner=dict(type="str", required=False), - ddname=dict(type="str", required=False), + dd_name=dict(type="str", required=False, aliases=['ddname']), ) try: @@ -517,27 +518,34 @@ def run_module(): stderr=str(err) ) + results = {} + results["changed"] = False + job_id = module.params.get("job_id") job_name = module.params.get("job_name") owner = module.params.get("owner") - ddname = module.params.get("ddname") + dd_name = module.params.get("dd_name") if not job_id and not job_name and not owner: - module.fail_json(msg="Please provide a job_id or job_name or owner") + module.fail_json(msg="Please provide a job_id or job_name or owner", stderr="", **results) try: results = {} - results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=ddname) - results["changed"] = False + results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=dd_name) + for job in results["jobs"]: + if "job_not_found" in job: + results["changed"] = False + del job['job_not_found'] + else: + results["changed"] = True except zoau_exceptions.JobFetchException as fetch_exception: module.fail_json( - msg="ZOAU exception", - rc=fetch_exception.response.rc, - stdout=fetch_exception.response.stdout_response, + msg=f"ZOAU exception {fetch_exception.response.stdout_response} rc {fetch_exception.response.rc}", stderr=fetch_exception.response.stderr_response, + changed=False ) except Exception as e: - module.fail_json(msg=repr(e)) + module.fail_json(msg=repr(e), **results) 
module.exit_json(**results) diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1852327ec5..d347efcc33 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -46,35 +46,31 @@ - When using a generation data set, only already created generations are valid. If either the relative name is positive, or negative but not found, the module will fail. - location: + remote_src: required: false - default: data_set - type: str - choices: - - data_set - - uss - - local + default: true + type: bool description: - - The JCL location. Supported choices are C(data_set), C(uss) or C(local). - - C(data_set) can be a PDS, PDSE, sequential data set, or a generation data set. - - C(uss) means the JCL location is located in UNIX System Services (USS). - - C(local) means locally to the Ansible control node. - wait_time_s: + - If set to C(false), the module searches for C(src) in the controller node. + - If set to C(true), the module searches for the file C(src) in the managed node. + wait_time: required: false default: 10 type: int description: - - Option I(wait_time_s) is the total time that module + - Option I(wait_time) is the total time that module L(zos_job_submit,./zos_job_submit.html) will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - - I(wait_time_s) is measured in seconds and must be a value greater than 0 + - I(wait_time) is measured in seconds and must be a value greater than 0 and less than 86400. - - The module can submit and forget jobs by setting I(wait_time_s) to 0. This way the + - The module can submit and forget jobs by setting I(wait_time) to 0. This way the module will not try to retrieve the job details other than job id. Job details and contents can be retrieved later by using L(zos_job_query,./zos_job_query.html) or L(zos_job_output,./zos_job_output.html) if needed. 
+ - If I(remote_src=False) and I(wait_time=0), the module will not clean the copy + of the file on the remote system, to avoid problems with job submission. max_rc: required: false type: int @@ -86,7 +82,7 @@ type: bool description: - Whether to print the DD output. - - If false, an empty list will be returned in the ddnames field. + - If false, an empty list will be returned in the dds field. volume: required: false type: str @@ -96,12 +92,12 @@ - When configured, the L(zos_job_submit,./zos_job_submit.html) will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - - Ignored for I(location=uss) and I(location=local). + - Ignored for I(remote_src=False). encoding: description: - Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - - This option is only supported for when I(location=local). + - This option is only supported for when I(remote_src=False). - If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system @@ -194,13 +190,13 @@ description: Total duration time of the job execution, if it has finished. type: str sample: 00:00:10 - ddnames: + dds: description: Data definition names. type: list elements: dict contains: - ddname: + dd_name: description: Data definition name. type: str @@ -234,7 +230,7 @@ sample: 574 content: description: - The ddname content. + The dd content. type: list elements: str sample: @@ -298,34 +294,35 @@ is the case of a job that errors or is active. type: int sample: 0 - steps: - description: - Series of JCL steps that were executed and their return codes. - type: list - elements: dict - contains: - step_name: - description: - Name of the step shown as "was executed" in the DD section. 
- type: str - sample: "STEP0001" - step_cc: - description: - The CC returned for this step in the DD section. - type: int - sample: 0 sample: ret_code: { "code": 0, "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - }, - ] } + steps: + description: + Series of JCL steps that were executed and their return codes. + type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + sample: "STEP0001" + step_cc: + description: + The CC returned for this step in the DD section. + type: int + sample: 0 + sample: + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ] job_class: description: Job class for this job. @@ -369,12 +366,12 @@ sample: "IEBGENER" system: description: - The job entry system that MVS uses to do work. + The job entry system that MVS uses to do work. type: str sample: STL1 subsystem: description: - The job entry subsystem that MVS uses to do work. + The job entry subsystem that MVS uses to do work. type: str sample: STL1 cpu_time: @@ -392,13 +389,11 @@ Origin node that submitted the job. 
type: str sample: "STL1" - sample: [ { - "class": "K", "content_type": "JOB", - "ddnames": [ + "dds": [ { "byte_count": "677", "content": [ @@ -419,7 +414,7 @@ "- 12 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "16", @@ -476,7 +471,7 @@ " 15 ++SYSPRINT DD SYSOUT=* ", " ++* " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "47", @@ -530,7 +525,7 @@ " IEF033I JOB/DBDGEN00/STOP 2020073.1250 ", " CPU: 0 HR 00 MIN 00.03 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "44", @@ -585,7 +580,7 @@ " **** END OF MESSAGE SUMMARY REPORT **** ", " " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "L", "record_count": "45", @@ -594,18 +589,17 @@ ], "job_id": "JOB00361", "job_name": "DBDGEN00", - "owner": "OMVSADM", "ret_code": { "code": 0, "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "DLORD6", - "step_cc": 0 - } - ] }, + "steps": [ + { "step_name": "DLORD6", + "step_cc": 0 + } + ], "job_class": "K", "execution_time": "00:00:10", "svc_class": "?", @@ -628,19 +622,19 @@ - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: uss + remote_src: true return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: local + remote_src: false encoding: from: ISO8859-1 to: IBM-037 @@ -648,36 +642,36 @@ - name: Submit JCL in an uncataloged PDSE on volume P2SS01. 
zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: data_set + remote_src: true max_rc: 16 - name: Submit JCL from the latest generation data set in a generation data group. zos_job_submit: src: HLQ.DATA.GDG(0) - location: data_set + remote_src: true - name: Submit JCL from a previous generation data set in a generation data group. zos_job_submit: src: HLQ.DATA.GDG(-2) - location: data_set + remote_src: true """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.encode import ( @@ -820,7 +814,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, start_ti job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] job_fetch_rc = job_fetched.return_code job_fetch_status = job_fetched.status - # Allow for jobs that need more time to be fectched to run the wait_time_s + # Allow for jobs that need more time to be fectched to run the wait_time except zoau_exceptions.JobFetchException as err: if duration >= timeout: raise err @@ -885,52 +879,6 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, start_ti return job_submitted.job_id if job_submitted else None, duration -def build_return_schema(result): - """ Builds return values schema with empty values. - - Parameters - ---------- - result : dict - Dictionary used to return values at execution finalization. - - Returns - ------- - dict - Dictionary used to return values at execution finalization. 
- """ - result = { - "jobs": [], - "job_id": None, - "job_name": None, - "duration": None, - "execution_time": None, - "ddnames": { - "ddname": None, - "record_count": None, - "id": None, - "stepname": None, - "procstep": None, - "byte_count": None, - "content": [], - }, - "ret_code": { - "code": None, - "msg": None, - "msg_code": None, - "msg_txt": None, - "steps": [], - }, - "job_class": None, - "svc_class": None, - "priority": None, - "asid": None, - "creation_date": None, - "queue_position": None, - "program_name": None, - } - return result - - def run_module(): """Initialize module. @@ -939,14 +887,14 @@ def run_module(): fail_json Parameter verification failed. fail_json - The value for option 'wait_time_s' is not valid. + The value for option 'wait_time' is not valid. """ module_args = dict( src=dict(type="str", required=True), - location=dict( - type="str", - default="data_set", - choices=["data_set", "uss", "local"], + remote_src=dict( + type="bool", + default=True, + required=False ), encoding=dict( type="dict", @@ -966,7 +914,7 @@ def run_module(): ), volume=dict(type="str", required=False), return_output=dict(type="bool", required=False, default=True), - wait_time_s=dict(type="int", default=10), + wait_time=dict(type="int", default=10), max_rc=dict(type="int", required=False), use_template=dict(type='bool', default=False), template_parameters=dict( @@ -1014,10 +962,10 @@ def run_module(): arg_defs = dict( src=dict(arg_type="data_set_or_path", required=True), - location=dict( - arg_type="str", - default="data_set", - choices=["data_set", "uss", "local"], + remote_src=dict( + arg_type="bool", + default=True, + required=False ), from_encoding=dict( arg_type="encoding", default=Defaults.DEFAULT_ASCII_CHARSET, required=False), @@ -1026,7 +974,7 @@ def run_module(): ), volume=dict(arg_type="volume", required=False), return_output=dict(arg_type="bool", default=True), - wait_time_s=dict(arg_type="int", required=False, default=10), + 
wait_time=dict(arg_type="int", required=False, default=10), max_rc=dict(arg_type="int", required=False), ) @@ -1042,22 +990,21 @@ def run_module(): msg="Parameter verification failed", stderr=str(err)) # Extract values from set module options - location = parsed_args.get("location") + remote_src = parsed_args.get("remote_src") volume = parsed_args.get("volume") src = parsed_args.get("src") return_output = parsed_args.get("return_output") - wait_time_s = parsed_args.get("wait_time_s") + wait_time = parsed_args.get("wait_time") max_rc = parsed_args.get("max_rc") - temp_file = parsed_args.get("src") if location == "local" else None + temp_file = parsed_args.get("src") if not remote_src else None # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) # Builds return value schema to make sure we return the return values schema. - result = build_return_schema(result) - if wait_time_s < 0 or wait_time_s > MAX_WAIT_TIME_S: + if wait_time < 0 or wait_time > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option 'wait_time_s' is not valid, it must " + result["msg"] = ("The value for option 'wait_time' is not valid, it must " "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) @@ -1065,68 +1012,72 @@ def run_module(): duration = 0 start_time = timer() - if location == "data_set": - # Resolving a relative GDS name and escaping special symbols if needed. - src_data = data_set.MVSDataSet(src) - - # Checking that the source is actually present on the system. - if volume is not None: - volumes = [volume] - # Get the data set name to catalog it. 
- src_ds_name = data_set.extract_dsname(src_data.name) - present, changed = DataSet.attempt_catalog_if_necessary(src_ds_name, volumes) - - if not present: - module.fail_json( - msg=(f"Unable to submit job {src_data.name} because the data set could " - f"not be cataloged on the volume {volume}.") - ) - elif data_set.is_member(src_data.name): - if not DataSet.data_set_member_exists(src_data.name): - module.fail_json(msg=f"Cannot submit job, the data set member {src_data.raw_name} was not found.") + if remote_src: + if "/" in src: + if path.exists(src): + if path.isfile(src): + job_submitted_id, duration = submit_src_jcl( + module, src, src_name=src, timeout=wait_time, is_unix=True) + else: + module.fail_json(msg=f"Unable to submit job {src} is a folder, must be a file.", **result) + else: + module.fail_json(msg=f"Unable to submit job {src} does not exists.", **result) else: - if not DataSet.data_set_exists(src_data.name): - module.fail_json(msg=f"Cannot submit job, the data set {src_data.raw_name} was not found.") + # Resolving a relative GDS name and escaping special symbols if needed. + src_data = data_set.MVSDataSet(src) + + # Checking that the source is actually present on the system. + if volume is not None: + volumes = [volume] + # Get the data set name to catalog it. 
+ src_ds_name = data_set.extract_dsname(src_data.name) + present, changed = DataSet.attempt_catalog_if_necessary(src_ds_name, volumes) + + if not present: + module.fail_json( + msg=(f"Unable to submit job {src_data.name} because the data set could " + f"not be cataloged on the volume {volume}."), **result + ) + elif data_set.is_member(src_data.name): + if not DataSet.data_set_member_exists(src_data.name): + module.fail_json(msg=f"Cannot submit job, the data set member {src_data.raw_name} was not found.", **result) + else: + if not DataSet.data_set_exists(src_data.name): + module.fail_json(msg=f"Cannot submit job, the data set {src_data.raw_name} was not found.", **result) + job_submitted_id, duration = submit_src_jcl( + module, src_data.name, src_name=src_data.raw_name, timeout=wait_time, is_unix=False, start_time=start_time) + else: job_submitted_id, duration = submit_src_jcl( - module, src_data.name, src_name=src_data.raw_name, timeout=wait_time_s, is_unix=False, start_time=start_time) - elif location == "uss": - job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, is_unix=True) - elif location == "local": - job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, is_unix=True) + module, src, src_name=src, timeout=wait_time, is_unix=True) # Explictly pass None for the unused args else a default of '*' will be # used and return undersirable results job_output_txt = None - result['job_id'] = job_submitted_id is_changed = True # If wait_time_s is 0, we do a deploy and forget strategy. 
- if wait_time_s != 0: + if wait_time != 0: try: job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, - dd_scan=return_output, duration=duration, timeout=wait_time_s, start_time=start_time) + dd_scan=return_output, duration=duration, timeout=wait_time, start_time=start_time) # This is resolvig a bug where the duration coming from job_output is passed by value, duration # being an immutable type can not be changed and must be returned or accessed from the job.py. - if job_output is not None: + if job_output_txt is not None: duration = job_output_txt[0].get("duration") if not None else duration - result["execution_time"] = job_output_txt[0].get("execution_time") + job_output_txt = parsing_job_response(job_output_txt, duration) - result["duration"] = duration - - if duration >= wait_time_s: + if duration >= wait_time: result["failed"] = True result["changed"] = False _msg = ("The JCL submitted with job id {0} but appears to be a long " "running job that exceeded its maximum wait time of {1} " "second(s). 
Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_time_s' to a value " - "greater than {2}.".format(str(job_submitted_id), str(wait_time_s), str(duration))) + "a long running job or increase option 'wait_time' to a value " + "greater than {2}.".format(str(job_submitted_id), str(wait_time), str(duration))) _msg_suffix = ("Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_time_s' to a value " + "a long running job or increase option 'wait_time' to a value " "greater than {0}.".format(str(duration))) if job_output_txt is not None: @@ -1142,6 +1093,7 @@ def run_module(): if job_output_txt: result["jobs"] = job_output_txt job_ret_code = job_output_txt[0].get("ret_code") + steps = job_output_txt[0].get("steps") if job_ret_code: job_ret_code_msg = job_ret_code.get("msg") @@ -1149,7 +1101,7 @@ def run_module(): job_ret_code_msg_code = job_ret_code.get("msg_code") if return_output is True and max_rc is not None: - is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, result) + is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, steps, result) if job_ret_code_msg is not None: if re.search("^(?:{0})".format("|".join(JOB_STATUSES)), job_ret_code_msg): @@ -1162,8 +1114,8 @@ def run_module(): raise Exception(_msg) if job_ret_code_code is not None and job_ret_code_msg == 'NOEXEC': - job_dd_names = job_output_txt[0].get("ddnames") - jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) + job_dd_names = job_output_txt[0].get("dds") + jes_jcl_dd = search_dictionaries("dd_name", "JESJCL", job_dd_names) # These are the conditions for a job run with TYPRUN=COPY. 
if not jes_jcl_dd: job_ret_code.update({"msg": "TYPRUN=COPY"}) @@ -1179,8 +1131,8 @@ def run_module(): # so further analyze the # JESJCL DD to figure out if its a TYPRUN job - job_dd_names = job_output_txt[0].get("ddnames") - jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) + job_dd_names = job_output_txt[0].get("dds") + jes_jcl_dd = search_dictionaries("dd_name", "JESJCL", job_dd_names) # Its possible jobs don't have a JESJCL which are active and this would # cause an index out of range error. @@ -1228,7 +1180,7 @@ def run_module(): if not return_output: for job in result.get("jobs", []): - job["ddnames"] = [] + job["dds"] = [] else: _msg = "The 'ret_code' dictionary was unavailable in the job log." result["ret_code"] = None @@ -1237,7 +1189,7 @@ def run_module(): else: _msg = "The job output log is unavailable." result["stderr"] = _msg - result["jobs"] = None + result["jobs"] = [] raise Exception(_msg) except Exception as err: result["failed"] = True @@ -1251,14 +1203,15 @@ def run_module(): finally: if temp_file is not None: shutil.rmtree(path.dirname(temp_file)) + else: + result["jobs"] = build_empty_response(job_submitted_id) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' result["changed"] = True if is_changed else False - result["failed"] = False module.exit_json(**result) -def assert_valid_return_code(max_rc, job_rc, ret_code, result): +def assert_valid_return_code(max_rc, job_rc, ret_code, steps, result): """Asserts valid return code. 
Parameters @@ -1301,7 +1254,7 @@ def assert_valid_return_code(max_rc, job_rc, ret_code, result): result["stderr"] = _msg raise Exception(_msg) - for step in ret_code["steps"]: + for step in steps: step_cc_rc = int(step["step_cc"]) step_name_for_rc = step["step_name"] if step_cc_rc > max_rc: @@ -1325,6 +1278,76 @@ def assert_valid_return_code(max_rc, job_rc, ret_code, result): return True +def parsing_job_response(jobs_raw, duration): + """_summary_ + + Args: + jobs_raw (_type_): _description_ + """ + job = jobs_raw[0] + jobs = [] + for job in jobs_raw: + job_dict = { + "job_id": job.get("job_id"), + "job_name": job.get("job_name"), + "content_type": job.get("content_type"), + "duration": duration, + "execution_time": job.get("execution_time"), + "dds": job.get("dds"), + "ret_code": job.get("ret_code"), + "steps": job.get("steps"), + "job_class": job.get("job_class"), + "svc_class": job.get("svc_class"), + "system": job.get("system"), + "subsystem": job.get("subsystem"), + "origin_node": job.get("origin_node"), + "cpu_time": job.get("cpu_time"), + "execution_node": job.get("execution_node"), + "priority": job.get("priority"), + "asid": job.get("asid"), + "creation_date": job.get("creation_date"), + "creation_time": job.get("creation_time"), + "queue_position": job.get("queue_position"), + "program_name": job.get("program_name"), + } + jobs.append(job_dict) + return jobs + + +def build_empty_response(job_submitted_id): + """_summary_ + + Args: + jobs_raw (_type_): _description_ + """ + jobs = [] + job_dict = { + "job_id": job_submitted_id, + "job_name": None, + "content_type": None, + "duration": None, + "execution_time": None, + "dds": [], + "ret_code": {"code": None, "msg": None, "msg_code": None, "msg_txt": None}, + "steps": [], + "job_class": None, + "svc_class": None, + "system": None, + "subsystem": None, + "origin_node": None, + "cpu_time": None, + "execution_node": None, + "priority": None, + "asid": None, + "creation_date": None, + "creation_time": None, + 
"queue_position": None, + "program_name": None, + } + jobs.append(job_dict) + return jobs + + def main(): run_module() diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index e1db120d2f..64caf0f3ec 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -37,6 +37,9 @@ def test_zos_job_output_no_job_id(ansible_zos_module): results = hosts.all.zos_job_output(job_id="") for result in results.contacted.values(): assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("msg") is not None + assert result.get("failed") is True assert result.get("jobs") is None @@ -46,7 +49,9 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None + assert result.get("msg") is not None assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_no_job_name(ansible_zos_module): @@ -54,6 +59,9 @@ def test_zos_job_output_no_job_name(ansible_zos_module): results = hosts.all.zos_job_output(job_name="") for result in results.contacted.values(): assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("msg") is not None + assert result.get("failed") is True assert result.get("jobs") is None @@ -62,7 +70,46 @@ def test_zos_job_output_invalid_job_name(ansible_zos_module): results = hosts.all.zos_job_output(job_name="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("subsystem") is None + assert 
job.get("system") is None + assert job.get("owner") is not None + assert job.get("cpu_time") is None + assert job.get("execution_node") is None + assert job.get("origin_node") is None + assert job.get("content_type") is None + assert job.get("creation_date") is None + assert job.get("creation_time") is None + assert job.get("execution_time") is None + assert job.get("job_class") is None + assert job.get("svc_class") is None + assert job.get("priority") is None + assert job.get("asid") is None + assert job.get("queue_position") is None + assert job.get("program_name") is None + assert job.get("class") is None + assert job.get("steps") is not None + assert job.get("dds") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is None + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") == "unavailable" + assert dds.get("record_count") == 0 + assert dds.get("id") is None + assert dds.get("stepname") is None + assert dds.get("procstep") is None + assert dds.get("byte_count") == 0 + assert dds.get("content") is None def test_zos_job_output_no_owner(ansible_zos_module): @@ -71,14 +118,20 @@ def test_zos_job_output_no_owner(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("msg") is not None + assert result.get("stderr") is not None + assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_invalid_owner(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(owner="INVALID") for result in results.contacted.values(): - assert result.get("failed") is True + assert result.get("changed") is False assert result.get("stderr") is not None + assert result.get("msg") is not None + assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_reject(ansible_zos_module): @@ -87,6 +140,9 @@ def 
test_zos_job_output_reject(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("msg") is not None + assert result.get("stderr") is not None + assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_job_exists(ansible_zos_module): @@ -99,29 +155,96 @@ def test_zos_job_output_job_exists(ansible_zos_module): ) jobs = hosts.all.zos_job_submit( - src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", remote_src=True, volume=None ) for job in jobs.contacted.values(): - print(job) + assert job.get("changed") is True + assert job.get("msg", False) is False assert job.get("jobs") is not None - for job in jobs.contacted.values(): - submitted_job_id = job.get("jobs")[0].get("job_id") - assert submitted_job_id is not None + job_ = job.get("jobs")[0] + assert job_.get("job_id") is not None + submitted_job_id = job_.get("job_id") + assert job_.get("job_name") is not None + assert job_.get("content_type") is not None + assert job_.get("duration") is not None + assert job_.get("execution_time") is not None + assert job_.get("job_class") is not None + assert job_.get("svc_class") is None + assert job_.get("priority") is not None + assert job_.get("asid") is not None + assert job_.get("creation_date") is not None + assert job_.get("creation_time") is not None + assert job_.get("queue_position") is not None + assert job_.get("program_name") is not None + + dds = job_.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job_.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job_.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + 
assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" results = hosts.all.zos_job_output(job_id=submitted_job_id) # was SAMPLE?! for result in results.contacted.values(): - assert result.get("changed") is False + assert result.get("changed") is True + assert result.get("msg", False) is False assert result.get("jobs") is not None - assert result.get("jobs")[0].get("ret_code").get("steps") is not None - assert result.get("jobs")[0].get("ret_code").get("steps")[0].get("step_name") == "STEP0001" - assert result.get("jobs")[0].get("content_type") == "JOB" - assert result.get("jobs")[0].get("execution_time") is not None - assert "system" in result.get("jobs")[0] - assert "subsystem" in result.get("jobs")[0] - assert "cpu_time" in result.get("jobs")[0] - assert "execution_node" in result.get("jobs")[0] - assert "origin_node" in result.get("jobs")[0] + + job = result.get("jobs")[0] + assert job.get("job_id") == submitted_job_id + assert job.get("job_name") is not None + assert job.get("subsystem") is not None + assert job.get("system") is not None + assert job.get("owner") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("origin_node") is not None + assert job.get("content_type") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + assert job.get("class") is not None + assert job.get("steps") is not None + assert job.get("dds") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + 
assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + finally: hosts.all.file(path=TEMP_PATH, state="absent") @@ -134,17 +257,55 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): cmd=f"echo {quote(JCL_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" ) result = hosts.all.zos_job_submit( - src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", remote_src=True, volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" results = hosts.all.zos_job_output(job_name="HELLO", ddname=dd_name) for result in results.contacted.values(): - assert result.get("changed") is False + assert result.get("changed") is True + assert result.get("msg", False) is False assert result.get("jobs") is not None - for job in result.get("jobs"): - assert len(job.get("ddnames")) == 1 - assert job.get("ddnames")[0].get("ddname") == dd_name + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("subsystem") is not None + assert job.get("system") is not None + assert job.get("owner") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("origin_node") is not None + assert job.get("content_type") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not 
None + assert job.get("class") is not None + assert job.get("steps") is not None + assert job.get("dds") is not None + assert len(job.get("dds")) == 1 + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" + + dds = job.get("dds")[0] + assert dds.get("dd_name") == dd_name + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + finally: hosts.all.file(path=TEMP_PATH, state="absent") @@ -153,4 +314,44 @@ def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None + assert result.get("changed") is False + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("subsystem") is None + assert job.get("system") is None + assert job.get("owner") is not None + assert job.get("cpu_time") is None + assert job.get("execution_node") is None + assert job.get("origin_node") is None + assert job.get("content_type") is None + assert job.get("creation_date") is None + assert job.get("creation_time") is None + assert job.get("execution_time") is None + assert job.get("job_class") is None + assert job.get("svc_class") is None + assert job.get("priority") is None + assert job.get("asid") is None + assert job.get("queue_position") is None + assert job.get("program_name") is None + assert job.get("class") is None + assert job.get("steps") is not None + assert job.get("dds") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is 
None + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") == "unavailable" + assert dds.get("record_count") == 0 + assert dds.get("id") is None + assert dds.get("stepname") is None + assert dds.get("procstep") is None + assert dds.get("byte_count") == 0 + assert dds.get("content") is None diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 929200a748..b9160d7d2f 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -111,41 +111,45 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): cmd=f"cp {temp_path}/SAMPLE \"//'{jdata_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src=f"{jdata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 + src=f"{jdata_set_name}(SAMPLE)", remote_src=True, wait_time=10 ) for result in results.contacted.values(): - # Default validation assert result.get("changed") is True - assert result.get("jobs") is not None assert result.get("msg", False) is False + assert result.get("jobs") is not None job = result.get("jobs")[0] - assert job.get("job_name") is not None - assert job.get("owner") is not None assert job.get("job_id") is not None + assert job.get("job_name") is not None assert job.get("content_type") is not None - assert job.get("system") is not None - assert job.get("subsystem") is not None - assert job.get("origin_node") is not None - assert job.get("execution_node") is not None - assert job.get("cpu_time") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None assert job.get("job_class") is not None + assert job.get("svc_class") is None assert job.get("priority") is not None assert job.get("asid") is not None assert job.get("creation_date") is not None assert job.get("creation_time") is not None + assert 
job.get("queue_position") is not None assert job.get("program_name") is not None - assert job.get("svc_class") is None - assert job.get("steps") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None step = job.get("steps")[0] assert step.get("step_name") is not None assert step.get("step_cc") is not None rc = job.get("ret_code") - assert rc.get("msg") is not None - assert rc.get("msg_code") == "0000" + assert rc.get("msg") == "CC" assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" assert rc.get("msg_txt") == "CC" fulljobid = job.get("job_id") @@ -204,40 +208,45 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): cmd=f"cp {temp_path}/SAMPLE \"//'{ndata_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src=f"{ndata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 + src=f"{ndata_set_name}(SAMPLE)", remote_src=True, wait_time=10 ) for result in results.contacted.values(): assert result.get("changed") is True - assert result.get("jobs") is not None assert result.get("msg", False) is False + assert result.get("jobs") is not None job = result.get("jobs")[0] - assert job.get("job_name") is not None - assert job.get("owner") is not None assert job.get("job_id") is not None + assert job.get("job_name") is not None assert job.get("content_type") is not None - assert job.get("system") is not None - assert job.get("subsystem") is not None - assert job.get("origin_node") is not None - assert job.get("execution_node") is not None - assert job.get("cpu_time") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None assert job.get("job_class") is not None + assert job.get("svc_class") is None assert job.get("priority") is not None 
assert job.get("asid") is not None assert job.get("creation_date") is not None assert job.get("creation_time") is not None + assert job.get("queue_position") is not None assert job.get("program_name") is not None - assert job.get("svc_class") is None - assert job.get("steps") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None step = job.get("steps")[0] assert step.get("step_name") is not None assert step.get("step_cc") is not None rc = job.get("ret_code") - assert rc.get("msg") is not None - assert rc.get("msg_code") == "0000" + assert rc.get("msg") == "CC" assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" assert rc.get("msg_txt") == "CC" jobname = "HE*L*" diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index fca001a3ec..efa4140794 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -416,7 +416,7 @@ - name: Submit async job. 
ibm.ibm_zos_core.zos_job_submit: src: {3} - location: local + remote_src: false async: 45 poll: 0 register: job_task @@ -475,22 +475,56 @@ def test_job_submit_pds(ansible_zos_module, location): ) if bool(location.get("default_location")): results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), wait_time_s=30 + src="{0}(SAMPLE)".format(data_set_name), remote_src=True, wait_time=30 ) else: results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="data_set", wait_time_s=30 + src="{0}(SAMPLE)".format(data_set_name), remote_src=True, wait_time=30 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True - assert "system" in result.get("jobs")[0] - assert "subsystem" in result.get("jobs")[0] - assert "cpu_time" in result.get("jobs")[0] - assert "execution_node" in result.get("jobs")[0] - assert "origin_node" in result.get("jobs")[0] + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not 
None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -515,12 +549,51 @@ def test_job_submit_pds_special_characters(ansible_zos_module): ) results = hosts.all.zos_job_submit( src="{0}(SAMPLE)".format(data_set_name_special_chars), - location="data_set", + remote_src=True, ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert 
dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name_special_chars, state="absent") @@ -535,13 +608,51 @@ def test_job_submit_uss(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) results = hosts.all.zos_job_submit( - src=f"{temp_path}/SAMPLE", location="uss", volume=None + src=f"{temp_path}/SAMPLE", remote_src=True, volume=None ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("content_type") == "JOB" assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert 
job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -555,35 +666,41 @@ def test_job_submit_and_forget_uss(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) results = hosts.all.zos_job_submit( - src=f"{temp_path}/SAMPLE", location="uss", volume=None, wait_time_s=0, + src=f"{temp_path}/SAMPLE", remote_src=True, volume=None, wait_time=0, ) for result in results.contacted.values(): - assert result.get("job_id") is not None assert result.get("changed") is True - assert len(result.get("jobs")) == 0 - assert result.get("job_name") is None - assert result.get("duration") is None - assert result.get("execution_time") is None - assert result.get("ddnames") is not None - assert result.get("ddnames").get("ddname") is None - assert result.get("ddnames").get("record_count") is None - assert result.get("ddnames").get("id") is None - assert result.get("ddnames").get("stepname") is None - assert result.get("ddnames").get("procstep") is None - assert result.get("ddnames").get("byte_count") is None - assert len(result.get("ddnames").get("content")) == 0 - assert result.get("ret_code") is not None - assert result.get("ret_code").get("msg") is None - assert result.get("ret_code").get("msg_code") is None - assert 
result.get("ret_code").get("code") is None - assert len(result.get("ret_code").get("steps")) == 0 - assert result.get("job_class") is None - assert result.get("svc_class") is None - assert result.get("priority") is None - assert result.get("asid") is None - assert result.get("creation_time") is None - assert result.get("queue_position") is None - assert result.get("program_name") is None + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is None + assert job.get("content_type") is None + assert job.get("duration") is None + assert job.get("execution_time") is None + assert job.get("job_class") is None + assert job.get("svc_class") is None + assert job.get("system") is None + assert job.get("subsystem") is None + assert job.get("origin_node") is None + assert job.get("cpu_time") is None + assert job.get("execution_node") is None + assert job.get("priority") is None + assert job.get("asid") is None + assert job.get("creation_date") is None + assert job.get("creation_time") is None + assert job.get("queue_position") is None + assert job.get("program_name") is None + assert job.get("dds") is not None + assert len(job.get("dds")) == 0 + assert job.get("steps") is not None + assert len(job.get("steps")) == 0 + rc = job.get("ret_code") + assert rc.get("msg") is None + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is None finally: hosts.all.file(path=temp_path, state="absent") @@ -593,13 +710,51 @@ def test_job_submit_local(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False, wait_time=10) for result in results.contacted.values(): - print(result) - assert 
result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" def test_job_submit_local_extra_r(ansible_zos_module): @@ -607,12 +762,51 @@ def test_job_submit_local_extra_r(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) + results = 
hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False, wait_time=10) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" def test_job_submit_local_badjcl(ansible_zos_module): @@ -620,11 +814,12 @@ def test_job_submit_local_badjcl(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_BAD) hosts = ansible_zos_module - 
results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False, wait_time=10) for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False + assert result.get("msg") is not None + assert result.get("failed") is True assert re.search(r'completion code', repr(result.get("msg"))) @@ -655,13 +850,52 @@ def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): results = hosts.all.zos_job_submit( src=data_set_name+"(SAMPLE)", - location="data_set", + remote_src=True, volume=volume_1 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get('changed') is True + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") 
is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -689,14 +923,51 @@ def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="data_set", wait_time_s=wait_time_s) + remote_src=True, wait_time=wait_time_s) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get('changed') is True - assert result.get('duration') <= wait_time_s - assert result.get('execution_time') is not None + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") <= wait_time_s + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert 
job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -724,18 +995,56 @@ def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="data_set", wait_time_s=wait_time_s) + remote_src=True, wait_time=wait_time_s) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get('changed') is True - assert result.get('duration') <= wait_time_s - assert result.get('execution_time') is not None + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") <= wait_time_s + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert 
job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") + def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): """This submits a 30 second job and only waits 10 seconds""" try: @@ -759,15 +1068,50 @@ def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="data_set", wait_time_s=wait_time_s) + remote_src=True, wait_time=wait_time_s) for result in results.contacted.values(): + assert result.get("changed") is False assert result.get("msg") is not None - assert result.get('changed') is False - assert result.get('duration') >= wait_time_s - # expecting at least "long running job that exceeded its maximum wait" + assert result.get("failed") is True assert re.search(r'exceeded', repr(result.get("msg"))) - assert result.get('execution_time') is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert 
job.get("duration") >= wait_time_s + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") == 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") == 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "AC" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -797,22 +1141,23 @@ def test_job_submit_max_rc(ansible_zos_module, args): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="local", + remote_src=False, max_rc=args["max_rc"], - wait_time_s=args["wait_time_s"] + wait_time=args["wait_time_s"] ) for result in results.contacted.values(): # Should fail normally as a non-zero RC will result in job submit failure if args["max_rc"] is None: + assert result.get("changed") is False assert result.get("msg") is not None - assert result.get('changed') is False + assert result.get("failed") is True # On busy systems, it is possible that the duration even for a job with a non-zero return code # will take considerable time to obtain the job log and thus you could see either error 
msg below #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - duration = result.get('duration') + duration = result.get("jobs")[0].get('duration') if duration >= args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) @@ -821,13 +1166,14 @@ def test_job_submit_max_rc(ansible_zos_module, args): # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 elif args["max_rc"] == 4: - assert result.get("msg") is not None - assert result.get('changed') is False # Expecting "The job return code, # 'ret_code[code]' 8 for the submitted job is greater # than the value set for option 'max_rc' 4. # Increase the value for 'max_rc' otherwise # this job submission has failed. + assert result.get("changed") is False + assert result.get("msg") is not None + assert result.get("failed") is True assert re.search( r'the submitted job is greater than the value set for option', repr(result.get("msg")) @@ -837,9 +1183,48 @@ def test_job_submit_max_rc(ansible_zos_module, args): # Will not fail and as the max_rc is set to 12 and the rc is 8 is a change true # there are other possibilities like an ABEND or JCL ERROR will fail this even # with a MAX RC - assert result.get("msg") is None - assert result.get('changed') is True - assert result.get("jobs")[0].get("ret_code").get("code") < 12 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert 
job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") < 12 + assert rc.get("msg_code") != "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=tmp_file.name, state="absent") @@ -909,15 +1294,54 @@ def test_job_submit_jinja_template(ansible_zos_module, args): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="local", + remote_src=False, use_template=True, template_parameters=args["options"] ) for result in results.contacted.values(): - assert result.get('changed') is True - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not 
None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: os.remove(tmp_file.name) @@ -933,16 +1357,55 @@ def test_job_submit_full_input(ansible_zos_module): ) results = hosts.all.zos_job_submit( src=f"{temp_path}/SAMPLE", - location="uss", + remote_src=True, volume=None, # This job used to set wait=True, but since it has been deprecated # and removed, it now waits up to 30 seconds. 
- wait_time_s=30 + wait_time=30 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -952,11 +1415,49 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = 
hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="local") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time=20, remote_src=False) for result in results.contacted.values(): assert result.get("changed") is False + assert result.get("msg") is not None assert re.search(r'completion code', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("failed") is True + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "JCLERR" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): @@ -964,16 +1465,53 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): with open(tmp_file.name, 
"w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local") + results = hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False) for result in results.contacted.values(): assert result.get("changed") is False assert re.search(r'please review the error for further details', repr(result.get("msg"))) assert re.search(r'please review the job log for status SEC', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("failed") is True + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "SEC" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None assert re.search( r'please review the job log for status SEC', - 
repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) @@ -983,22 +1521,55 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" },) for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "TYPRUN=SCAN" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None assert re.search( r'run with special job processing TYPRUN=SCAN', - 
repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) - assert result.get("jobs")[0].get("ret_code").get("code") is None - assert result.get("jobs")[0].get("ret_code").get("msg") == "TYPRUN=SCAN" - assert result.get("jobs")[0].get("ret_code").get("msg_code") is None def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): @@ -1007,8 +1578,8 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" @@ -1018,18 +1589,48 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): # assert result.get("changed") is False # When running a job with TYPRUN=COPY, a copy of the JCL will be kept in the JES spool, so # effectively, the system is changed even though the job didn't run. - assert result.get("changed") is True - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("changed") is False + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = 
job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "NOEXEC" + assert rc.get("code") == None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None assert re.search( - r'The job was run with TYPRUN=COPY.', - repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + r'NOEXEC.', + repr(rc.get("msg_txt")) ) - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("ret_code").get("msg") == 'TYPRUN=COPY' - assert result.get("jobs")[0].get("ret_code").get("msg_code") == '0000' - # assert result.get("jobs")[0].get("ret_code").get("code") is None - # assert result.get("jobs")[0].get("ret_code").get("msg") is None - # assert result.get("jobs")[0].get("ret_code").get("msg_code") is None def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): @@ -1038,22 +1639,47 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" },) for result in results.contacted.values(): + print(result) assert result.get("changed") is False - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is not None + assert job.get("system") is None + assert 
job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + assert len(job.get("dds")) == 0 + + rc = job.get("ret_code") assert re.search( r'long running job', - repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) - assert result.get("jobs")[0].get("ret_code").get("code") is None - assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" - assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + assert rc.get("code") is None + assert rc.get("msg") == "HOLD" + assert rc.get("msg_code") is None def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): @@ -1062,22 +1688,47 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" },) for result in results.contacted.values(): + print(result) assert result.get("changed") is False - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + 
assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + assert len(job.get("dds")) == 0 + + rc = job.get("ret_code") assert re.search( r'long running job', - repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) - assert result.get("jobs")[0].get("ret_code").get("code") is None - assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" - assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + assert rc.get("code") is None + assert rc.get("msg") == "HOLD" + assert rc.get("msg_code") is None @pytest.mark.parametrize("generation", ["0", "-1"]) @@ -1102,11 +1753,50 @@ def test_job_from_gdg_source(ansible_zos_module, generation): cmd="dcp '{0}/SAMPLE' '{1}'".format(temp_path, gds_name) ) - results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + results = hosts.all.zos_job_submit(src=gds_name, remote_src=True) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not 
None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=f"{source}(0)", state="absent") @@ -1125,10 +1815,12 @@ def test_inexistent_negative_gds(ansible_zos_module): # Only creating generation 0. hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") - results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + results = hosts.all.zos_job_submit(src=gds_name, remote_src=True) for result in results.contacted.values(): assert result.get("changed") is False - assert "was not found" in result.get("msg") + assert result.get("msg") is not None + assert result.get("failed") is True + assert re.search(r'was not found', repr(result.get("msg"))) finally: hosts.all.zos_data_set(name=f"{source}(0)", state="absent") hosts.all.zos_data_set(name=source, state="absent") @@ -1145,10 +1837,12 @@ def test_inexistent_positive_gds(ansible_zos_module): # Only creating generation 0. 
hosts.all.zos_data_set(name=gds_name, state="present", type="seq") - results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + results = hosts.all.zos_job_submit(src=gds_name, remote_src=True) for result in results.contacted.values(): assert result.get("changed") is False - assert "was not found" in result.get("msg") + assert result.get("msg") is not None + assert result.get("failed") is True + assert re.search(r'was not found', repr(result.get("msg"))) finally: hosts.all.zos_data_set(name=f"{source}(0)", state="absent") hosts.all.zos_data_set(name=source, state="absent") @@ -1176,16 +1870,55 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="local", - wait_time_s=15 + remote_src=False, + wait_time=15 ) for result in results.contacted.values(): # We shouldn't get an error now that ZOAU handles invalid/unprintable # UTF-8 chars correctly. - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not 
None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -1218,7 +1951,7 @@ def test_job_submit_async(get_config): cut_python_path, python_version, tmp_file.name - )), + )), playbook.name )) @@ -1228,7 +1961,7 @@ def test_job_submit_async(get_config): ssh_key, user, python_path - )), + )), inventory.name )) From d0fcaa5a0735caf648aa38f22e7336c0690b2a35 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Tue, 5 Aug 2025 17:41:33 -0600 Subject: [PATCH 04/73] [Enhancement][zos_operator_action_query] Module interface update (#2218) * Added changes to the options in zos_operator_action_query * Modified tests for test_zos_operator_action_query regarding option names * Updated literal default value to true * Updated ocurrences of message to msg * Updated test with new msg_ instead of message_ * Updated unit tests and date * Updated date in unit tests * Updated zos_operator_action_query with aliases * Added a missing comma * Ensured that all values are always returned * Updated zos_operator_action_query * Added changelog * Corrected doc --- ...operator_action_query-interface-update.yml | 4 + plugins/modules/zos_operator_action_query.py | 118 ++++++------ .../test_zos_operator_action_query_func.py | 168 +++++++++++------- .../test_zos_operator_action_query_unit.py | 16 +- 4 files changed, 174 insertions(+), 132 deletions(-) create mode 100644 
changelogs/fragments/2218-zos_operator_action_query-interface-update.yml diff --git a/changelogs/fragments/2218-zos_operator_action_query-interface-update.yml b/changelogs/fragments/2218-zos_operator_action_query-interface-update.yml new file mode 100644 index 0000000000..f26747e77f --- /dev/null +++ b/changelogs/fragments/2218-zos_operator_action_query-interface-update.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_job_query - Option ``message_filter`` is deprecated in favor of ``msg_filter``. Option ``message_id`` is deprecated in favor of ``msg_id``. + Return value ``message_id`` is deprecated in favor of ``msg_id``. Return value ``message_text`` is deprecated in favor of ``msg_txt``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2218). \ No newline at end of file diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 525518a2f2..2b38e75aa1 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -30,6 +30,7 @@ - "Demetrios Dimatos (@ddimatos)" - "Ivan Moreno (@rexemin)" - "Rich Parker (@richp405)" + - "Fernando Flores (@fernandofloresg)" options: system: @@ -41,7 +42,7 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false - message_id: + msg_id: description: - Return outstanding messages requiring operator action awaiting a reply for a particular message identifier. @@ -50,6 +51,7 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false + aliases: [ message_id ] job_name: description: - Return outstanding messages requiring operator action awaiting a reply @@ -59,7 +61,7 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false - message_filter: + msg_filter: description: - Return outstanding messages requiring operator action awaiting a reply that match a regular expression (regex) filter. @@ -67,11 +69,12 @@ are returned regardless of their content. 
type: dict required: false + aliases: [ message_filter ] suboptions: filter: description: - Specifies the substring or regex to match to the outstanding messages, - see I(use_regex). + see I(literal). - All special characters in a filter string that are not a regex are escaped. - Valid Python regular expressions are supported. See L(the official documentation,https://docs.python.org/library/re.html) for more information. @@ -80,16 +83,16 @@ newline." required: True type: str - use_regex: + literal: description: - Indicates that the value for I(filter) is a regex or a string to match. - - If False, the module assumes that I(filter) is not a regex and - matches the I(filter) substring on the outstanding messages. - - If True, the module creates a regex from the I(filter) string and + - If False, the module creates a regex from the I(filter) string and matches it to the outstanding messages. + - If True, the module assumes that I(filter) is not a regex and + matches the I(filter) substring on the outstanding messages. required: False type: bool - default: False + default: True seealso: - module: zos_operator @@ -116,11 +119,11 @@ - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: - message_id: dsi* + msg_id: dsi* - name: Display all outstanding messages that have the text IMS READY in them zos_operator_action_query: - message_filter: + msg_filter: filter: IMS READY - name: Display all outstanding messages where the job name begins with 'mq', @@ -128,11 +131,11 @@ pattern 'IMS' zos_operator_action_query: job_name: mq* - message_id: dsi* + msg_id: dsi* system: mv29 - message_filter: + msg_filter: filter: ^.*IMS.*$ - use_regex: true + literal: true """ RETURN = r""" @@ -147,13 +150,13 @@ count: description: The total number of outstanding messages. - returned: on success + returned: always type: int sample: 12 actions: description: The list of the outstanding messages. 
- returned: success + returned: always type: list elements: dict contains: @@ -183,11 +186,11 @@ returned: on success type: str sample: STC01537 - message_text: + msg_txt: description: Content of the outstanding message requiring operator - action awaiting a reply. If I(message_filter) is set, - I(message_text) will be filtered accordingly. + action awaiting a reply. If I(msg_filter) is set, + I(msg_txt) will be filtered accordingly. returned: success type: str sample: "*399 HWSC0000I *IMS CONNECT READY* IM5HCONN" @@ -198,7 +201,7 @@ returned: success type: str sample: IM5HCONN - message_id: + msg_id: description: Message identifier for outstanding message requiring operator action awaiting a reply. @@ -212,18 +215,18 @@ "type": 'R', "system": 'MV27', "job_id": 'STC01537', - "message_text": '*399 HWSC0000I *IMS CONNECT READY* IM5HCONN', + "msg_txt": '*399 HWSC0000I *IMS CONNECT READY* IM5HCONN', "job_name": 'IM5HCONN', - "message_id": 'HWSC0000I' + "msg_id": 'HWSC0000I' }, { "number": '002', "type": 'R', "system": 'MV27', "job_id": 'STC01533', - "message_text": '*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H', + "msg_txt": '*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H', "job_name": 'IM5HCTRL', - "message_id": 'DFS3139I' + "msg_id": 'DFS3139I' } ] """ @@ -260,19 +263,20 @@ def run_module(): """ module_args = dict( system=dict(type="str", required=False), - message_id=dict(type="str", required=False), + msg_id=dict(type="str", required=False, aliases=['message_id']), job_name=dict(type="str", required=False), - message_filter=dict( + msg_filter=dict( type="dict", required=False, + aliases=['message_filter'], options=dict( filter=dict(type="str", required=True), - use_regex=dict(default=False, type="bool", required=False) + literal=dict(default=True, type="bool", required=False) ) ) ) - result = dict(changed=False) + result = dict(changed=False, count=0, actions=[]) module = AnsibleModule(argument_spec=module_args, 
supports_check_mode=False) requests = [] try: @@ -321,7 +325,7 @@ def run_module(): cmd="d r,a,jn", ) - merged_list = create_merge_list(cmd_result_a.message, cmd_result_b.message, new_params['message_filter']) + merged_list = create_merge_list(cmd_result_a.message, cmd_result_b.message, new_params['msg_filter']) requests = find_required_request(merged_list, new_params) if requests: result["count"] = len(requests) @@ -351,9 +355,9 @@ def parse_params(params): """ arg_defs = dict( system=dict(arg_type=system_type, required=False), - message_id=dict(arg_type=message_id_type, required=False), + msg_id=dict(arg_type=msg_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), - message_filter=dict(arg_type=message_filter_type, required=False) + msg_filter=dict(arg_type=msg_filter_type, required=False) ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -380,7 +384,7 @@ def system_type(arg_val, params): return arg_val.upper() -def message_id_type(arg_val, params): +def msg_id_type(arg_val, params): """Message id type. Parameters @@ -420,7 +424,7 @@ def job_name_type(arg_val, params): return arg_val.upper() -def message_filter_type(arg_val, params): +def msg_filter_type(arg_val, params): """Message filter type. Parameters @@ -442,12 +446,12 @@ def message_filter_type(arg_val, params): """ try: filter_text = arg_val.get("filter") - use_regex = arg_val.get("use_regex") + literal = arg_val.get("literal") - if use_regex: - raw_arg_val = r'{0}'.format(filter_text) - else: + if literal: raw_arg_val = r'^.*{0}.*$'.format(re.escape(filter_text)) + else: + raw_arg_val = r'{0}'.format(filter_text) re.compile(raw_arg_val) except re.error: @@ -503,7 +507,7 @@ def find_required_request(merged_list, params): return requests -def create_merge_list(message_a, message_b, message_filter): +def create_merge_list(msg_a, msg_b, msg_filter): """Merge the return lists that execute both 'd r,a,s' and 'd r,a,jn'. 
For example, if we have: 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO' OR 'CANCEL'" @@ -512,20 +516,20 @@ def create_merge_list(message_a, message_b, message_filter): Parameters ---------- - message_a : str + msg_a : str Result coming from command 'd r,a,s'. - message_b : str + msg_b : str Result coming from command 'd r,a,jn'. - message_filter : str + msg_filter : str Message filter. Returns ------- Union - Merge of the result of message_a and the result of message_b. + Merge of the result of msg_a and the result of msg_b. """ - list_a = parse_result_a(message_a, message_filter) - list_b = parse_result_b(message_b, message_filter) + list_a = parse_result_a(msg_a, msg_filter) + list_b = parse_result_b(msg_b, msg_filter) merged_list = merge_list(list_a, list_b) return merged_list @@ -546,15 +550,15 @@ def filter_requests(merged_list, params): Filtered list. """ system = params.get("system") - message_id = params.get("message_id") + msg_id = params.get("msg_id") job_name = params.get("job_name") newlist = merged_list if system: newlist = handle_conditions(newlist, "system", system) if job_name: newlist = handle_conditions(newlist, "job_name", job_name) - if message_id: - newlist = handle_conditions(newlist, "message_id", message_id) + if msg_id: + newlist = handle_conditions(newlist, "msg_id", msg_id) return newlist @@ -619,14 +623,14 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): return OperatorQueryResult(rc, stdout, stderr) -def match_raw_message(msg, message_filter): +def match_raw_message(msg, msg_filter): """Match raw message. Parameters ---------- msg : str Message to match. - message_filter : str + msg_filter : str Filter for the message. Return @@ -634,11 +638,11 @@ def match_raw_message(msg, message_filter): bool If the pattern matches msg. 
""" - pattern = re.compile(message_filter, re.DOTALL) + pattern = re.compile(msg_filter, re.DOTALL) return pattern.match(msg) -def parse_result_a(result, message_filter): +def parse_result_a(result, msg_filter): """parsing the result that coming from command 'd r,a,s', there are usually two formats: - line with job_id: 810 R MV2D JOB58389 &810 ARC0055A REPLY 'GO' OR 'CANCEL' @@ -649,7 +653,7 @@ def parse_result_a(result, message_filter): ---------- result : str Result coming from command 'd r,a,s'. - message_filter : str + msg_filter : str Message filter. Returns @@ -668,7 +672,7 @@ def parse_result_a(result, message_filter): ) for match in match_iter: # If there was a filter specified, we skip messages that do not match it. - if message_filter is not None and not match_raw_message(match.string, message_filter): + if msg_filter is not None and not match_raw_message(match.string, msg_filter): continue dict_temp = { @@ -679,13 +683,13 @@ def parse_result_a(result, message_filter): if match.group(4) != "": dict_temp["job_id"] = match.group(4) if match.group(5) != "": - dict_temp["message_text"] = match.group(5).strip() + dict_temp["msg_txt"] = match.group(5).strip() list.append(dict_temp) return list -def parse_result_b(result, message_filter): +def parse_result_b(result, msg_filter): """Parse the result that comes from command 'd r,a,jn', the main purpose to use this command is to get the job_name and message id, which is not included in 'd r,a,s' @@ -694,7 +698,7 @@ def parse_result_b(result, message_filter): ---------- result : str Result coming from command 'd r,a,jn'. - message_filter : str + msg_filter : str Message filter. Returns @@ -714,13 +718,13 @@ def parse_result_b(result, message_filter): for match in match_iter: # If there was a filter specified, we skip messages that do not match it. 
- if message_filter is not None and not match_raw_message(match.string, message_filter): + if msg_filter is not None and not match_raw_message(match.string, msg_filter): continue dict_temp = { "number": match.group(1), "job_name": match.group(2), - "message_id": match.group(3), + "msg_id": match.group(3), } # Sometimes 'job_name' will be null because the operator action is a diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index f8f521a286..a9de1c9074 100644 --- a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2024 +# Copyright (c) IBM Corporation 2019, 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -24,7 +24,7 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): results = hosts.all.zos_operator_action_query() try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: @@ -32,14 +32,16 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_option_message_id(ansible_zos_module): +def test_zos_operator_action_query_option_msg_id(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") - results = hosts.all.zos_operator_action_query(message_id="IEE094D") + results = 
hosts.all.zos_operator_action_query(msg_id="IEE094D") try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: @@ -47,40 +49,46 @@ def test_zos_operator_action_query_option_message_id(ansible_zos_module): for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_option_message_id_invalid_abbreviation( +def test_zos_operator_action_query_option_msg_id_invalid_abbreviation( ansible_zos_module ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") - results = hosts.all.zos_operator_action_query(message_id="IEE") + results = hosts.all.zos_operator_action_query(msg_id="IEE") try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): - assert not result.get("actions") + assert result.get("actions") is not None + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "*"]) -def test_zos_operator_action_query_option_message_id_regex( +@pytest.mark.parametrize("msg_id", ["IEE*", "*"]) +def test_zos_operator_action_query_option_msg_id_regex( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") - results = hosts.all.zos_operator_action_query(message_id=message_id) + results = hosts.all.zos_operator_action_query(msg_id=msg_id) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in 
action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None def test_zos_operator_action_query_option_system(ansible_zos_module): hosts = ansible_zos_module @@ -91,6 +99,8 @@ def test_zos_operator_action_query_option_system(ansible_zos_module): results = hosts.all.zos_operator_action_query(system=system_name) for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None def test_zos_operator_action_query_option_system_invalid_abbreviation( ansible_zos_module @@ -102,12 +112,14 @@ def test_zos_operator_action_query_option_system_invalid_abbreviation( system_name = result.get("stdout", "").strip() results = hosts.all.zos_operator_action_query(system=system_name[:-1]) for result in results.contacted.values(): - assert not result.get("actions") + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "IEE094D", "*"]) -def test_zos_operator_action_query_option_system_and_message_id( +@pytest.mark.parametrize("msg_id", ["IEE*", "IEE094D", "*"]) +def test_zos_operator_action_query_option_system_and_msg_id( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module sysinfo = hosts.all.shell(cmd="uname -n") @@ -115,7 +127,7 @@ def test_zos_operator_action_query_option_system_and_message_id( for result in sysinfo.contacted.values(): system_name = result.get("stdout", "").strip() results = hosts.all.zos_operator_action_query( - system=system_name, message_id=message_id + system=system_name, msg_id=msg_id ) for result in results.contacted.values(): assert result.get("actions") @@ 
-131,18 +143,20 @@ def test_zos_operator_action_query_option_system_regex(ansible_zos_module): system=system_name[:3] + "*") try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "IEE094D", "*"]) -def test_zos_operator_action_query_option_system_regex_and_message_id( +@pytest.mark.parametrize("msg_id", ["IEE*", "IEE094D", "*"]) +def test_zos_operator_action_query_option_system_regex_and_msg_id( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") @@ -151,17 +165,19 @@ def test_zos_operator_action_query_option_system_regex_and_message_id( for result in sysinfo.contacted.values(): system_name = result.get("stdout", " ").strip() results = hosts.all.zos_operator_action_query( - system=system_name[:3] + "*", message_id=message_id + system=system_name[:3] + "*", msg_id=msg_id ) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize("system", ["", "OVER8CHARS", "--BADNM", "invalid-system"]) def test_zos_operator_action_query_invalid_option_system( @@ -171,31 +187,37 @@ def test_zos_operator_action_query_invalid_option_system( hosts = ansible_zos_module results = 
hosts.all.zos_operator_action_query(system=system) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "IEE094D", "*"]) -def test_zos_operator_action_query_valid_message_id_invalid_option_system( +@pytest.mark.parametrize("msg_id", ["IEE*", "IEE094D", "*"]) +def test_zos_operator_action_query_valid_msg_id_invalid_option_system( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module results = hosts.all.zos_operator_action_query( - system="invalid-system", message_id=message_id + system="invalid-system", msg_id=msg_id ) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["", "--BADNM", "invalid-message"]) -def test_zos_operator_action_query_invalid_option_message_id( +@pytest.mark.parametrize("msg_id", ["", "--BADNM", "invalid-message"]) +def test_zos_operator_action_query_invalid_option_msg_id( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module - results = hosts.all.zos_operator_action_query(message_id=message_id) + results = hosts.all.zos_operator_action_query(msg_id=msg_id) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_valid_option_system_invalid_option_message_id( +def test_zos_operator_action_query_valid_option_system_invalid_option_msg_id( ansible_zos_module ): hosts = ansible_zos_module @@ -204,66 +226,72 @@ def test_zos_operator_action_query_valid_option_system_invalid_option_message_id for result in sysinfo.contacted.values(): system_name = 
result.get("stdout", "").strip() results = hosts.all.zos_operator_action_query( - system=system_name, message_id="invalid-message" + system=system_name, msg_id="invalid-message" ) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None def test_zos_operator_action_query_invalid_option_job_name(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator_action_query( job_name="invalid-job-name") for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize( - "message_filter", + "msg_filter", [ {"filter": "DUMP"}, - {"filter": "DUMP", "use_regex": False}, - {"filter": "^.*DUMP.*$", "use_regex": True}, - {"filter": "^.*OPERAND\\(S\\).*$", "use_regex": True} + {"filter": "DUMP", "literal": True}, + {"filter": "^.*DUMP.*$", "literal": False}, + {"filter": "^.*OPERAND\\(S\\).*$", "literal": False} ] ) -def test_zos_operator_action_query_option_message_filter_one_match( +def test_zos_operator_action_query_option_msg_filter_one_match( ansible_zos_module, - message_filter + msg_filter ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query( - message_filter=message_filter) + msg_filter=msg_filter) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize( - "message_filter", + 
"msg_filter", [ {"filter": "DUMP"}, - {"filter": "DUMP", "use_regex": False}, - {"filter": "^.*DUMP.*$", "use_regex": True}, - {"filter": "^.*OPERAND\\(S\\).*$", "use_regex": True} + {"filter": "DUMP", "literal": True}, + {"filter": "^.*DUMP.*$", "literal": False}, + {"filter": "^.*OPERAND\\(S\\).*$", "literal": False} ] ) -def test_zos_operator_action_query_option_message_filter_multiple_matches( +def test_zos_operator_action_query_option_msg_filter_multiple_matches( ansible_zos_module, - message_filter + msg_filter ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query( - message_filter=message_filter) + msg_filter=msg_filter) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: @@ -271,38 +299,44 @@ def test_zos_operator_action_query_option_message_filter_multiple_matches( for result in results.contacted.values(): assert result.get("actions") assert len(result.get("actions")) > 1 + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize( - "message_filter", + "msg_filter", [ {"filter": "IMS"}, - {"filter": "IMS", "use_regex": False}, - {"filter": "^.*IMS.*$", "use_regex": True}, + {"filter": "IMS", "literal": True}, + {"filter": "^.*IMS.*$", "literal": False}, ] ) -def test_zos_operator_action_query_option_message_filter_no_match( +def test_zos_operator_action_query_option_msg_filter_no_match( ansible_zos_module, - message_filter + msg_filter ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query( - message_filter=message_filter) + msg_filter=msg_filter) try: for action in results.get("actions"): - if 
"SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): - assert not result.get("actions") + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_invalid_option_message_filter( +def test_zos_operator_action_query_invalid_option_msg_filter( ansible_zos_module ): hosts = ansible_zos_module results = hosts.all.zos_operator_action_query( - message_filter={"filter": "*DUMP", "use_regex": True}) + msg_filter={"filter": "*DUMP", "literal": False}) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None diff --git a/tests/unit/test_zos_operator_action_query_unit.py b/tests/unit/test_zos_operator_action_query_unit.py index c69a89a20b..16ff8e9577 100644 --- a/tests/unit/test_zos_operator_action_query_unit.py +++ b/tests/unit/test_zos_operator_action_query_unit.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -32,18 +32,18 @@ dummy_dict2 = {"system": "mv2c"} -dummy_dict3 = {"message_id": "DFH*"} +dummy_dict3 = {"msg_id": "DFH*"} -dummy_dict4_uppercase = {"message_id": "DFH*", "system": "MV28"} +dummy_dict4_uppercase = {"msg_id": "DFH*", "system": "MV28"} -dummy_dict4_lowercase = {"message_id": "DFH*", "system": "mv28"} +dummy_dict4_lowercase = {"msg_id": "DFH*", "system": "mv28"} -dummy_dict5 = {"message_filter": {"filter": "^.*IMS.*$", "use_regex": True}} +dummy_dict5 = {"msg_filter": {"filter": "^.*IMS.*$", "use_regex": True}} -dummy_dict6 = {"system": "mv27", "message_id": "DFS*", "job_name": "IM5H*", "message_filter": {"filter": "IMS"}} +dummy_dict6 = {"system": "mv27", "msg_id": "DFS*", "job_name": "IM5H*", "msg_filter": {"filter": "IMS"}} -dummy_dict_invalid_message = {"message_id": "$$#$%#"} -dummy_dict_invalid_filter = {"message_filter": {"filter": "*IMS", "use_regex": True}} +dummy_dict_invalid_message = {"msg_id": "$$#$%#"} +dummy_dict_invalid_filter = {"msg_filter": {"filter": "*IMS", "use_regex": True}} dummy_dict_invalid_job_name = {"job_name": "IM5H123456"} dummy_dict_invalid_system = {"system": "mv2712345"} From df3440b6c6a8461806703cbf4181c006bcaff759 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Thu, 7 Aug 2025 09:51:35 -0600 Subject: [PATCH 05/73] Merge v1.15.0-beta.1 back to dev (#2223) * Release tasks v1.14.0 GA (#2189) * [v1.14.0] Avoid failures when default dir ~/.ansible/tmp/ is not previously created and fix failures when using become in zos_job_submit (#2109) * Updated plugins in zos_copy * Updated action zos_job_submit * Updated tests * Updated permissions * Added fix for content * Added changelogs * Updated zos_job_submit action plugin * Fixed sanity issue * Updated zos_copy * Started release process * Updated lifecylce * Updated meta * Updated zos_mount docs * Updated docs * Updated data set utils * Updated release notes * Updaded lifecycle * [Release] Merge release tasks for v1.14.1 to main 
(#2200) * [1.14.1][zos_copy] Fix permission denied when transferring file to managed node with non-root user (#2198) * Updated issue template * Added fix for wrong permissions set in zos_copy * Removed comment * Fixed sanity isseu * Added changelogs * Added changelog and release notes * Updated release notes * Modified release notes * Merged release tasks into staging (#2219) * updated galaxy.yml with versioning * updating versions and dates. * version summary block * changelog work * ac document run * release notes updated. * cleaned up antsibull run, verified new version is in changelog * correction to release notes * Fixed release notes * removed all the changelog fragments --------- Co-authored-by: Rich Parker Updated release notes * Merged from main --- .github/ISSUE_TEMPLATE/bug_issue.yml | 1 + .../ISSUE_TEMPLATE/collaboration_issue.yml | 1 + .github/ISSUE_TEMPLATE/doc_issue.yml | 1 + CHANGELOG.rst | 48 +++++++- README.md | 2 +- changelogs/.plugin-cache.yaml | 7 +- changelogs/changelog.yaml | 116 ++++++++++++++++++ ...30-Set_dynamic_volumes_for_volume_init.yml | 3 - .../fragments/2033-remove-dev-tools.yml | 3 - .../2039-documentation-zos_copy-opercmd.yml | 3 - ...ackup_restore-fixed_return_backup_name.yml | 7 -- .../2043-zos_mount-volume-size-resize.yml | 3 - .../fragments/2047-zos_fetch-update-docs.yml | 3 - ...kup_restore-added-return-values-in-doc.yml | 4 - .../2055-Zos_apf-shell-commands-to-api.yml | 3 - ...ng-cpu_time-execution_node-origin_node.yml | 9 -- ...a_set-Ensure-Dataset-Volume-Validation.yml | 15 --- .../2058-Add_sanity_ignore_for_2_18.yml | 3 - .../fragments/2059-Github-sanity-2-18-fix.yml | 4 - .../2060-zos_mount-skip_fsumf168_from_df.yml | 5 - .../fragments/2061-alias-support-zos_stat.yml | 3 - ...-zos_data_set-Removed-Extra-Validation.yml | 4 - ...073-zos_find-finding-migrated-datasets.yml | 6 - ...75-migrated-data-sets-support-zos_stat.yml | 4 - .../fragments/2079-become-use-zos_fetch.yml | 5 - ...2080-zos_lineinfile-fixed-json-parsing.yml 
| 7 -- .../2081-zos_archive-add-encoding-support.yml | 3 - ...access_to_the_first_and_second_columns.yml | 5 - .../2098-docs-migrated-data-sets-examples.yml | 9 -- ...00-zos_copy-Identical-gdg-copy-support.yml | 10 -- ...py-supporting-aliases-for-src-and-dest.yml | 3 - .../2105-zos_unarchive-encoding-support.yml | 4 - .../2111-update-zos_copy-block-size-docs.yml | 4 - .../2116-zos_archive-skip_encoding.yml | 4 - ...20-zos_lineinfile-Added-return-content.yml | 4 - ...ase_to_check_tmphlq_zos_backup_restore.yml | 3 - ...36-zos_unarchive_skip_encoding_support.yml | 4 - .../fragments/2137-zos_stat-new-fields.yml | 8 -- ...n_dependency_finder_with_other_command.yml | 3 - ...ting-pound-in-dataset-name-and content.yml | 3 - .../2192-zos_archive-revert_src_encoding.yml | 4 - ...g-for-volume-definition-in-zos_mvs_raw.yml | 7 -- docs/source/life-cycle.rst | 4 + docs/source/modules/zos_archive.rst | 84 +++++++++++++ docs/source/modules/zos_backup_restore.rst | 32 +++++ docs/source/modules/zos_copy.rst | 14 +++ docs/source/modules/zos_data_set.rst | 6 + docs/source/modules/zos_fetch.rst | 4 +- docs/source/modules/zos_find.rst | 40 +++++- docs/source/modules/zos_job_output.rst | 30 ++++- docs/source/modules/zos_job_query.rst | 32 ++++- docs/source/modules/zos_job_submit.rst | 34 ++++- docs/source/modules/zos_mvs_raw.rst | 108 ++++++++++++++++ docs/source/modules/zos_replace.rst | 42 ++++--- docs/source/modules/zos_script.rst | 1 - docs/source/modules/zos_unarchive.rst | 83 ++++++++++++- docs/source/release_notes.rst | 78 +++++++++++- galaxy.yml | 2 +- meta/ibm_zos_core_meta.yml | 2 +- 59 files changed, 731 insertions(+), 213 deletions(-) delete mode 100644 changelogs/fragments/2030-Set_dynamic_volumes_for_volume_init.yml delete mode 100644 changelogs/fragments/2033-remove-dev-tools.yml delete mode 100644 changelogs/fragments/2039-documentation-zos_copy-opercmd.yml delete mode 100644 changelogs/fragments/2040-zos_backup_restore-fixed_return_backup_name.yml delete mode 100644 
changelogs/fragments/2043-zos_mount-volume-size-resize.yml delete mode 100644 changelogs/fragments/2047-zos_fetch-update-docs.yml delete mode 100644 changelogs/fragments/2049-zos_backup_restore-added-return-values-in-doc.yml delete mode 100644 changelogs/fragments/2055-Zos_apf-shell-commands-to-api.yml delete mode 100644 changelogs/fragments/2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml delete mode 100644 changelogs/fragments/2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml delete mode 100644 changelogs/fragments/2058-Add_sanity_ignore_for_2_18.yml delete mode 100644 changelogs/fragments/2059-Github-sanity-2-18-fix.yml delete mode 100644 changelogs/fragments/2060-zos_mount-skip_fsumf168_from_df.yml delete mode 100644 changelogs/fragments/2061-alias-support-zos_stat.yml delete mode 100644 changelogs/fragments/2068-zos_data_set-Removed-Extra-Validation.yml delete mode 100644 changelogs/fragments/2073-zos_find-finding-migrated-datasets.yml delete mode 100644 changelogs/fragments/2075-migrated-data-sets-support-zos_stat.yml delete mode 100644 changelogs/fragments/2079-become-use-zos_fetch.yml delete mode 100644 changelogs/fragments/2080-zos_lineinfile-fixed-json-parsing.yml delete mode 100644 changelogs/fragments/2081-zos_archive-add-encoding-support.yml delete mode 100644 changelogs/fragments/2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml delete mode 100644 changelogs/fragments/2098-docs-migrated-data-sets-examples.yml delete mode 100644 changelogs/fragments/2100-zos_copy-Identical-gdg-copy-support.yml delete mode 100644 changelogs/fragments/2103-zos_copy-supporting-aliases-for-src-and-dest.yml delete mode 100644 changelogs/fragments/2105-zos_unarchive-encoding-support.yml delete mode 100644 changelogs/fragments/2111-update-zos_copy-block-size-docs.yml delete mode 100644 changelogs/fragments/2116-zos_archive-skip_encoding.yml delete mode 100644 
changelogs/fragments/2120-zos_lineinfile-Added-return-content.yml delete mode 100644 changelogs/fragments/2135-Test_case_to_check_tmphlq_zos_backup_restore.yml delete mode 100644 changelogs/fragments/2136-zos_unarchive_skip_encoding_support.yml delete mode 100644 changelogs/fragments/2137-zos_stat-new-fields.yml delete mode 100644 changelogs/fragments/2138-Allow_run_dependency_finder_with_other_command.yml delete mode 100644 changelogs/fragments/2153-zos_copy-supporting-pound-in-dataset-name-and content.yml delete mode 100644 changelogs/fragments/2192-zos_archive-revert_src_encoding.yml delete mode 100644 changelogs/fragments/2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml diff --git a/.github/ISSUE_TEMPLATE/bug_issue.yml b/.github/ISSUE_TEMPLATE/bug_issue.yml index 8c91214198..c4f7378f02 100644 --- a/.github/ISSUE_TEMPLATE/bug_issue.yml +++ b/.github/ISSUE_TEMPLATE/bug_issue.yml @@ -33,6 +33,7 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.15.0-beta.1 - v1.14.1 - v1.14.0 - v1.14.0-beta.1 diff --git a/.github/ISSUE_TEMPLATE/collaboration_issue.yml b/.github/ISSUE_TEMPLATE/collaboration_issue.yml index bfa9f457b8..79ab99aefb 100644 --- a/.github/ISSUE_TEMPLATE/collaboration_issue.yml +++ b/.github/ISSUE_TEMPLATE/collaboration_issue.yml @@ -42,6 +42,7 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). 
multiple: false options: + - v1.15.0-beta.1 - v1.14.1 - v1.14.0 - v1.14.0-beta.1 diff --git a/.github/ISSUE_TEMPLATE/doc_issue.yml b/.github/ISSUE_TEMPLATE/doc_issue.yml index 13b3950f7e..f8f68e115d 100644 --- a/.github/ISSUE_TEMPLATE/doc_issue.yml +++ b/.github/ISSUE_TEMPLATE/doc_issue.yml @@ -34,6 +34,7 @@ body: description: Which version of z/OS Ansible core collection are you using. If you are unsure, review the [documentation](https://ibm.github.io/z_ansible_collections_doc/faqs/faqs.html#how-do-i-update-a-collection-to-the-latest-version). multiple: false options: + - v1.15.0-beta.1 - v1.14.1 - v1.14.0 - v1.14.0-beta.1 diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 956ab2d87c..eee3829d9a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,12 +4,56 @@ ibm.ibm\_zos\_core Release Notes .. contents:: Topics -v1.14.1 -======= +v1.15.0-beta.1 +============== Release Summary --------------- +Release Date: '2025-07-30' +This changelog describes all changes made to the modules and plugins included +in this collection. The release date is the date the changelog is created. +For additional details such as required dependencies and availability review +the collections `release notes ` + +Minor Changes +------------- + +- zos_archive - Adds support for encoding before archiving files. (https://github.com/ansible-collections/ibm_zos_core/pull/2081) +- zos_archive - Adds support for reverting the encoding of a source's files after archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2192) +- zos_archive - Adds support for skipping encoding in archive module. This allows users to skip encoding for certain files before archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2116) +- zos_copy - Added support for british pound character usage in file content and data set names for both source and destination when copying. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2153) +- zos_copy - Adds new option `identical_gdg_copy` in the module. This allows copying GDG generations from a source base to a destination base while preserving generation data set absolute names when the destination base does not exist prior to the copy. (https://github.com/ansible-collections/ibm_zos_core/pull/2100). +- zos_copy - Adds support of using alias names in src and dest parameters for PS, PDS and PDSE data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/2103) +- zos_fetch - Updated the documentation to correctly state what the default behavior of the module is. (https://github.com/ansible-collections/ibm_zos_core/pull/2047). +- zos_find - Adds functionality to find migrated data sets. - Adds functionality to find different types of data sets at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/2073). +- zos_job_output - Adds new fields cpu_time, origin_node and execution_node to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). +- zos_job_query - Adds new fields cpu_time, origin_node and execution_node to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). +- zos_job_submit - Adds new fields cpu_time, origin_node and execution_node to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). +- zos_mvs_raw - Before this addition, you could not put anything in columns 1 or 2, were reserved for JCL processing. Change now allows add reserved_cols option and validate that the module get access to modify dd_content option base on the value, if not retain the previous behavior or work. (https://github.com/ansible-collections/ibm_zos_core/pull/2086) +- zos_mvs_raw - Adds support for volume data definition. (https://github.com/ansible-collections/ibm_zos_core/pull/2194) +- zos_stat - Added support to recall migrated data sets and return its attributes. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2075) +- zos_stat - Adds new fields that describe the type of the resource that was queried. These new fields are `isfile`, `isdataset`, `isaggregate` and `isgdg`. (https://github.com/ansible-collections/ibm_zos_core/pull/2137) +- zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2061) +- zos_stat - Module now returns whether the resource queried exists on the managed node with the `exists` field inside `stat`. (https://github.com/ansible-collections/ibm_zos_core/pull/2137) +- zos_unarchive - Added encoding support for the unarchive module. This allows users to encode the files after unarchiving them in a perticular encoding. (https://github.com/ansible-collections/ibm_zos_core/pull/2105) + +Bugfixes +-------- + +- zos_backup_restore - Return value `backup_name` was empty upon successful result. Fix now returns `backup_name` populated. (https://github.com/ansible-collections/ibm_zos_core/pull/2040). +- zos_data_set - Attempting to create a data set with the same name on a different volume did not work, nor did it report a failure. The fix now informs the user that if the data set is cataloged on a different volume, it needs to be uncataloged before using the data set module to create a new data set on a different volume. (https://github.com/ansible-collections/ibm_zos_core/pull/2057). +- zos_fetch - Previously, the use of `become` would result in a permissions error while trying to fetch a data set or a member. Fix now allows a user to escalate privileges when fetching resources. (https://github.com/ansible-collections/ibm_zos_core/pull/2079) +- zos_lineinfile - Return values ``return_content`` and ``backup_name`` were not always being returned. Fix now ensure that these values are always present in the module's response. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2120) +- zos_lineinfile - The module would report a false negative when certain special characters where present in the `line` option. Fix now reports the successful operation. (https://github.com/ansible-collections/ibm_zos_core/pull/2080). +- zos_mount - FSUMF168 return in stderror means that the mount dataset wouldn't resolve. While this shows a catalog or volume issue, it should not impact our search for an existing mount. Added handling to the df call, so that FSUMF168 are ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/2060). + +New Modules +----------- + +- ibm.ibm_zos_core.zos_replace - Replace all instances of a pattern within a file or data set. + +v1.14.1 Release Date: '2025-07-03' This changelog describes all changes made to the modules and plugins included in this collection. The release date is the date the changelog is created. diff --git a/README.md b/README.md index f7b8c813ac..7e84bb28b0 100644 --- a/README.md +++ b/README.md @@ -136,7 +136,7 @@ All releases will meet the following test criteria. * Python 3.12.x * IBM Open Enterprise SDK for Python 3.12.x * IBM Z Open Automation Utilities (ZOAU) 1.3.4.x -* z/OS V2R5 +* z/OS V2R5 or V3R1 ## Contributing diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index f6b22a4814..a9c741e33c 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -110,6 +110,11 @@ plugins: name: zos_ping namespace: '' version_added: 1.1.0 + zos_replace: + description: Replace all instances of a pattern within a file or data set. 
+ name: zos_replace + namespace: '' + version_added: 1.15.0 zos_script: description: Run scripts in z/OS name: zos_script @@ -146,4 +151,4 @@ plugins: strategy: {} test: {} vars: {} -version: 1.14.1 +version: 1.15.0-beta.1 diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 60c49ee1c6..a73cb18c43 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -855,6 +855,122 @@ releases: - 2196-fix-copy-permission-issues.yml - v1.14.1_summary.yml release_date: '2025-07-01' + 1.15.0-beta.1: + changes: + bugfixes: + - zos_backup_restore - Return value `backup_name` was empty upon successful + result. Fix now returns `backup_name` populated. (https://github.com/ansible-collections/ibm_zos_core/pull/2040). + - zos_data_set - Attempting to create a data set with the same name on a different + volume did not work, nor did it report a failure. The fix now informs the + user that if the data set is cataloged on a different volume, it needs to + be uncataloged before using the data set module to create a new data set on + a different volume. (https://github.com/ansible-collections/ibm_zos_core/pull/2057). + - zos_fetch - Previously, the use of `become` would result in a permissions + error while trying to fetch a data set or a member. Fix now allows a user + to escalate privileges when fetching resources. (https://github.com/ansible-collections/ibm_zos_core/pull/2079) + - zos_lineinfile - Return values ``return_content`` and ``backup_name`` were + not always being returned. Fix now ensure that these values are always present + in the module's response. (https://github.com/ansible-collections/ibm_zos_core/pull/2120) + - zos_lineinfile - The module would report a false negative when certain special + characters where present in the `line` option. Fix now reports the successful + operation. (https://github.com/ansible-collections/ibm_zos_core/pull/2080). + - zos_mount - FSUMF168 return in stderror means that the mount dataset wouldn't + resolve. 
While this shows a catalog or volume issue, it should not impact + our search for an existing mount. Added handling to the df call, so that FSUMF168 + are ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/2060). + minor_changes: + - zos_archive - Adds support for encoding before archiving files. (https://github.com/ansible-collections/ibm_zos_core/pull/2081) + - zos_archive - Adds support for reverting the encoding of a source's files + after archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2192) + - zos_archive - Adds support for skipping encoding in archive module. This allows + users to skip encoding for certain files before archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2116) + - zos_copy - Added support for british pound character usage in file content + and data set names for both source and destination when copying. (https://github.com/ansible-collections/ibm_zos_core/pull/2153) + - zos_copy - Adds new option `identical_gdg_copy` in the module. This allows + copying GDG generations from a source base to a destination base while preserving + generation data set absolute names when the destination base does not exist + prior to the copy. (https://github.com/ansible-collections/ibm_zos_core/pull/2100). + - zos_copy - Adds support of using alias names in src and dest parameters for + PS, PDS and PDSE data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/2103) + - zos_fetch - Updated the documentation to correctly state what the default + behavior of the module is. (https://github.com/ansible-collections/ibm_zos_core/pull/2047). + - zos_find - Adds functionality to find migrated data sets. - Adds functionality + to find different types of data sets at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/2073). + - zos_job_output - Adds new fields cpu_time, origin_node and execution_node + to response. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2056). + - zos_job_query - Adds new fields cpu_time, origin_node and execution_node to + response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). + - zos_job_submit - Adds new fields cpu_time, origin_node and execution_node + to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). + - zos_mvs_raw - Before this addition, you could not put anything in columns + 1 or 2, were reserved for JCL processing. Change now allows add reserved_cols + option and validate that the module get access to modify dd_content option + base on the value, if not retain the previous behavior or work. (https://github.com/ansible-collections/ibm_zos_core/pull/2086) + - zos_mvs_raw - Adds support for volume data definition. (https://github.com/ansible-collections/ibm_zos_core/pull/2194) + - zos_stat - Added support to recall migrated data sets and return its attributes. + (https://github.com/ansible-collections/ibm_zos_core/pull/2075) + - zos_stat - Adds new fields that describe the type of the resource that was + queried. These new fields are `isfile`, `isdataset`, `isaggregate` and `isgdg`. + (https://github.com/ansible-collections/ibm_zos_core/pull/2137) + - zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2061) + - zos_stat - Module now returns whether the resource queried exists on the managed + node with the `exists` field inside `stat`. (https://github.com/ansible-collections/ibm_zos_core/pull/2137) + - zos_unarchive - Added encoding support for the unarchive module. This allows + users to encode the files after unarchiving them in a perticular encoding. + (https://github.com/ansible-collections/ibm_zos_core/pull/2105) + release_summary: 'Release Date: ''2025-07-30'' + + This changelog describes all changes made to the modules and plugins included + + in this collection. 
The release date is the date the changelog is created. + + For additional details such as required dependencies and availability review + + the collections `release notes ` + + ' + fragments: + - 2030-Set_dynamic_volumes_for_volume_init.yml + - 2033-remove-dev-tools.yml + - 2039-documentation-zos_copy-opercmd.yml + - 2040-zos_backup_restore-fixed_return_backup_name.yml + - 2043-zos_mount-volume-size-resize.yml + - 2047-zos_fetch-update-docs.yml + - 2049-zos_backup_restore-added-return-values-in-doc.yml + - 2055-Zos_apf-shell-commands-to-api.yml + - 2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml + - 2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml + - 2058-Add_sanity_ignore_for_2_18.yml + - 2059-Github-sanity-2-18-fix.yml + - 2060-zos_mount-skip_fsumf168_from_df.yml + - 2061-alias-support-zos_stat.yml + - 2068-zos_data_set-Removed-Extra-Validation.yml + - 2073-zos_find-finding-migrated-datasets.yml + - 2075-migrated-data-sets-support-zos_stat.yml + - 2079-become-use-zos_fetch.yml + - 2080-zos_lineinfile-fixed-json-parsing.yml + - 2081-zos_archive-add-encoding-support.yml + - 2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml + - 2098-docs-migrated-data-sets-examples.yml + - 2100-zos_copy-Identical-gdg-copy-support.yml + - 2103-zos_copy-supporting-aliases-for-src-and-dest.yml + - 2105-zos_unarchive-encoding-support.yml + - 2111-update-zos_copy-block-size-docs.yml + - 2116-zos_archive-skip_encoding.yml + - 2120-zos_lineinfile-Added-return-content.yml + - 2135-Test_case_to_check_tmphlq_zos_backup_restore.yml + - 2136-zos_unarchive_skip_encoding_support.yml + - 2137-zos_stat-new-fields.yml + - 2138-Allow_run_dependency_finder_with_other_command.yml + - 2153-zos_copy-supporting-pound-in-dataset-name-and content.yml + - 2192-zos_archive-revert_src_encoding.yml + - 2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml + - v1.15.0-beta.1_summary.yml + modules: + - description: Replace all instances of 
a pattern within a file or data set. + name: zos_replace + namespace: '' + release_date: '2025-08-05' 1.2.1: changes: bugfixes: diff --git a/changelogs/fragments/2030-Set_dynamic_volumes_for_volume_init.yml b/changelogs/fragments/2030-Set_dynamic_volumes_for_volume_init.yml deleted file mode 100644 index 8e70654d58..0000000000 --- a/changelogs/fragments/2030-Set_dynamic_volumes_for_volume_init.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_volume_init.py - Adds support dynamic volumes and allocations for testing. - (https://github.com/ansible-collections/ibm_zos_core/pull/2030). \ No newline at end of file diff --git a/changelogs/fragments/2033-remove-dev-tools.yml b/changelogs/fragments/2033-remove-dev-tools.yml deleted file mode 100644 index e1015c8644..0000000000 --- a/changelogs/fragments/2033-remove-dev-tools.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - ac - Removed development scripts from the repository. - (https://github.com/ansible-collections/ibm_zos_core/pull/2033) diff --git a/changelogs/fragments/2039-documentation-zos_copy-opercmd.yml b/changelogs/fragments/2039-documentation-zos_copy-opercmd.yml deleted file mode 100644 index d22c1c2725..0000000000 --- a/changelogs/fragments/2039-documentation-zos_copy-opercmd.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_copy - Removed the need of MVS.MCSOPER.ZOAU profile in zos_copy. - (https://github.com/ansible-collections/ibm_zos_core/pull/2039). diff --git a/changelogs/fragments/2040-zos_backup_restore-fixed_return_backup_name.yml b/changelogs/fragments/2040-zos_backup_restore-fixed_return_backup_name.yml deleted file mode 100644 index 631110940f..0000000000 --- a/changelogs/fragments/2040-zos_backup_restore-fixed_return_backup_name.yml +++ /dev/null @@ -1,7 +0,0 @@ -bugfixes: - - zos_backup_restore - Return value `backup_name` was empty upon successful result. - Fix now returns `backup_name` populated. - (https://github.com/ansible-collections/ibm_zos_core/pull/2040). 
-trivial: - - test_zos_backup_restore - added backup_name assertion for return values in the test suite. - (https://github.com/ansible-collections/ibm_zos_core/pull/2040). \ No newline at end of file diff --git a/changelogs/fragments/2043-zos_mount-volume-size-resize.yml b/changelogs/fragments/2043-zos_mount-volume-size-resize.yml deleted file mode 100644 index 552c7f9f2b..0000000000 --- a/changelogs/fragments/2043-zos_mount-volume-size-resize.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_mount_func.py - Modifying the volume cylinder size, so test case does not fail due to volume space constraints. - (https://github.com/ansible-collections/ibm_zos_core/pull/2043). \ No newline at end of file diff --git a/changelogs/fragments/2047-zos_fetch-update-docs.yml b/changelogs/fragments/2047-zos_fetch-update-docs.yml deleted file mode 100644 index 4aa5ba03b3..0000000000 --- a/changelogs/fragments/2047-zos_fetch-update-docs.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_fetch - Updated the documentation to correctly state what the default behavior of the module is. - (https://github.com/ansible-collections/ibm_zos_core/pull/2047). \ No newline at end of file diff --git a/changelogs/fragments/2049-zos_backup_restore-added-return-values-in-doc.yml b/changelogs/fragments/2049-zos_backup_restore-added-return-values-in-doc.yml deleted file mode 100644 index 328b04bb7f..0000000000 --- a/changelogs/fragments/2049-zos_backup_restore-added-return-values-in-doc.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_backup_restore - Updated documentation on zos_backup_restore to notify about return - values of Backup and restore operation. - (https://github.com/ansible-collections/ibm_zos_core/pull/2049). 
diff --git a/changelogs/fragments/2055-Zos_apf-shell-commands-to-api.yml b/changelogs/fragments/2055-Zos_apf-shell-commands-to-api.yml deleted file mode 100644 index 7414081314..0000000000 --- a/changelogs/fragments/2055-Zos_apf-shell-commands-to-api.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - zos_apf - Updated the module to use the API instead of shell commands. - (https://github.com/ansible-collections/ibm_zos_core/pull/2055). \ No newline at end of file diff --git a/changelogs/fragments/2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml b/changelogs/fragments/2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml deleted file mode 100644 index 351a92fe81..0000000000 --- a/changelogs/fragments/2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml +++ /dev/null @@ -1,9 +0,0 @@ -minor_changes: - - zos_job_submit - Adds new fields cpu_time, origin_node and execution_node to response. - (https://github.com/ansible-collections/ibm_zos_core/pull/2056). - - - zos_job_query - Adds new fields cpu_time, origin_node and execution_node to response. - (https://github.com/ansible-collections/ibm_zos_core/pull/2056). - - - zos_job_output - Adds new fields cpu_time, origin_node and execution_node to response. - (https://github.com/ansible-collections/ibm_zos_core/pull/2056). diff --git a/changelogs/fragments/2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml b/changelogs/fragments/2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml deleted file mode 100644 index 849751794b..0000000000 --- a/changelogs/fragments/2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml +++ /dev/null @@ -1,15 +0,0 @@ -bugfixes: - - zos_data_set - Attempting to create a data set with the same name on a - different volume did not work, nor did it report a failure. 
- The fix now informs the user that if the data set is cataloged on a - different volume, it needs to be uncataloged before using the data - set module to create a new data set on a different volume. - (https://github.com/ansible-collections/ibm_zos_core/pull/2057). -trivial: - - data_set - added validation logic to compare requested volumes against - cataloged volumes. - Improved error messaging when volume conflicts occur during data set creation on - different volume. - (https://github.com/ansible-collections/ibm_zos_core/pull/2057). - - zos_data_set - Added documentation about potential false positive scenarios. - (https://github.com/ansible-collections/ibm_zos_core/pull/2057). diff --git a/changelogs/fragments/2058-Add_sanity_ignore_for_2_18.yml b/changelogs/fragments/2058-Add_sanity_ignore_for_2_18.yml deleted file mode 100644 index a20f7081f8..0000000000 --- a/changelogs/fragments/2058-Add_sanity_ignore_for_2_18.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - tests/sanity - Add sanity ignore for ansible 2.18. - (https://github.com/ansible-collections/ibm_zos_core/pull/2058). \ No newline at end of file diff --git a/changelogs/fragments/2059-Github-sanity-2-18-fix.yml b/changelogs/fragments/2059-Github-sanity-2-18-fix.yml deleted file mode 100644 index a2ea4dc33d..0000000000 --- a/changelogs/fragments/2059-Github-sanity-2-18-fix.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - ac-ansible-test-sanity.yml - Updated the ansible version to 2.18, - venv to 2.18, python to 3.12. - (https://github.com/ansible-collections/ibm_zos_core/pull/2059). \ No newline at end of file diff --git a/changelogs/fragments/2060-zos_mount-skip_fsumf168_from_df.yml b/changelogs/fragments/2060-zos_mount-skip_fsumf168_from_df.yml deleted file mode 100644 index a59aa5f2d7..0000000000 --- a/changelogs/fragments/2060-zos_mount-skip_fsumf168_from_df.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_mount - FSUMF168 return in stderror means that the mount dataset wouldn't resolve. 
- While this shows a catalog or volume issue, it should not impact our search for an existing mount. - Added handling to the df call, so that FSUMF168 are ignored. - (https://github.com/ansible-collections/ibm_zos_core/pull/2060). diff --git a/changelogs/fragments/2061-alias-support-zos_stat.yml b/changelogs/fragments/2061-alias-support-zos_stat.yml deleted file mode 100644 index b2887e5605..0000000000 --- a/changelogs/fragments/2061-alias-support-zos_stat.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_stat - Adds support to query data sets using their aliases. - (https://github.com/ansible-collections/ibm_zos_core/pull/2061) diff --git a/changelogs/fragments/2068-zos_data_set-Removed-Extra-Validation.yml b/changelogs/fragments/2068-zos_data_set-Removed-Extra-Validation.yml deleted file mode 100644 index ae7d84ae91..0000000000 --- a/changelogs/fragments/2068-zos_data_set-Removed-Extra-Validation.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_data_set - Removed Extra Validation step Error for multi volume cases because it is being handled - by ZOAU - (https://github.com/ansible-collections/ibm_zos_core/pull/2068). \ No newline at end of file diff --git a/changelogs/fragments/2073-zos_find-finding-migrated-datasets.yml b/changelogs/fragments/2073-zos_find-finding-migrated-datasets.yml deleted file mode 100644 index b96d9ba79a..0000000000 --- a/changelogs/fragments/2073-zos_find-finding-migrated-datasets.yml +++ /dev/null @@ -1,6 +0,0 @@ -minor_changes: - - zos_find - - Adds functionality to find migrated data sets. - - Adds functionality to find different types of data sets at the same time. - (https://github.com/ansible-collections/ibm_zos_core/pull/2073). 
- diff --git a/changelogs/fragments/2075-migrated-data-sets-support-zos_stat.yml b/changelogs/fragments/2075-migrated-data-sets-support-zos_stat.yml deleted file mode 100644 index b491f21365..0000000000 --- a/changelogs/fragments/2075-migrated-data-sets-support-zos_stat.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_stat - Added support to recall migrated data sets and return - its attributes. - (https://github.com/ansible-collections/ibm_zos_core/pull/2075) diff --git a/changelogs/fragments/2079-become-use-zos_fetch.yml b/changelogs/fragments/2079-become-use-zos_fetch.yml deleted file mode 100644 index 7fe5a864fe..0000000000 --- a/changelogs/fragments/2079-become-use-zos_fetch.yml +++ /dev/null @@ -1,5 +0,0 @@ -bugfixes: - - zos_fetch - Previously, the use of `become` would result in a permissions error - while trying to fetch a data set or a member. Fix now allows a user to escalate - privileges when fetching resources. - (https://github.com/ansible-collections/ibm_zos_core/pull/2079) diff --git a/changelogs/fragments/2080-zos_lineinfile-fixed-json-parsing.yml b/changelogs/fragments/2080-zos_lineinfile-fixed-json-parsing.yml deleted file mode 100644 index f5df8538e7..0000000000 --- a/changelogs/fragments/2080-zos_lineinfile-fixed-json-parsing.yml +++ /dev/null @@ -1,7 +0,0 @@ -bugfixes: - - zos_lineinfile - The module would report a false negative when certain special characters - where present in the `line` option. Fix now reports the successful operation. - (https://github.com/ansible-collections/ibm_zos_core/pull/2080). -trivial: - - test_zos_lineinfile_func - added test case to verify insertbefore functionality with regex pattern before Ansible block line. - (https://github.com/ansible-collections/ibm_zos_core/pull/2080). 
diff --git a/changelogs/fragments/2081-zos_archive-add-encoding-support.yml b/changelogs/fragments/2081-zos_archive-add-encoding-support.yml deleted file mode 100644 index ec39b90884..0000000000 --- a/changelogs/fragments/2081-zos_archive-add-encoding-support.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_archive - Adds support for encoding before archiving files. - (https://github.com/ansible-collections/ibm_zos_core/pull/2081) \ No newline at end of file diff --git a/changelogs/fragments/2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml b/changelogs/fragments/2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml deleted file mode 100644 index ae4786ac64..0000000000 --- a/changelogs/fragments/2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml +++ /dev/null @@ -1,5 +0,0 @@ -minor_changes: - - zos_mvs_raw - Before this addition, you could not put anything in columns 1 or 2, were reserved for JCL processing. - Change now allows add reserved_cols option and validate that the module get access to modify dd_content option - base on the value, if not retain the previous behavior or work. - (https://github.com/ansible-collections/ibm_zos_core/pull/2086) \ No newline at end of file diff --git a/changelogs/fragments/2098-docs-migrated-data-sets-examples.yml b/changelogs/fragments/2098-docs-migrated-data-sets-examples.yml deleted file mode 100644 index 6b4ec0604d..0000000000 --- a/changelogs/fragments/2098-docs-migrated-data-sets-examples.yml +++ /dev/null @@ -1,9 +0,0 @@ -trivial: - - zos_copy - Added a note to the documentation redirecting users to other - modules to see how to recall migrated data sets before trying to copy - them. - (https://github.com/ansible-collections/ibm_zos_core/pull/2098) - - zos_mvs_raw - Added an example for how to recall a migrated data set. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/2098) - - zos_tso_cmd - Added an example for how to recall a migrated data set. - (https://github.com/ansible-collections/ibm_zos_core/pull/2098) diff --git a/changelogs/fragments/2100-zos_copy-Identical-gdg-copy-support.yml b/changelogs/fragments/2100-zos_copy-Identical-gdg-copy-support.yml deleted file mode 100644 index 753e1fec8e..0000000000 --- a/changelogs/fragments/2100-zos_copy-Identical-gdg-copy-support.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_copy - Adds new option `identical_gdg_copy` in the module. - This allows copying GDG generations from a source base to a destination base - while preserving generation data set absolute names when the destination base does not exist prior to the copy. - (https://github.com/ansible-collections/ibm_zos_core/pull/2100). -trivial: - - test_zos_copy_func - Added a test case to ensure GDG generation names are preserved - when copying with identical_gdg_copy is true and the destination base is non-existent. - (https://github.com/ansible-collections/ibm_zos_core/pull/2100). - diff --git a/changelogs/fragments/2103-zos_copy-supporting-aliases-for-src-and-dest.yml b/changelogs/fragments/2103-zos_copy-supporting-aliases-for-src-and-dest.yml deleted file mode 100644 index 1099eab4aa..0000000000 --- a/changelogs/fragments/2103-zos_copy-supporting-aliases-for-src-and-dest.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_copy - Adds support of using alias names in src and dest parameters for PS, PDS and PDSE data sets. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/2103) diff --git a/changelogs/fragments/2105-zos_unarchive-encoding-support.yml b/changelogs/fragments/2105-zos_unarchive-encoding-support.yml deleted file mode 100644 index 4dee443cb3..0000000000 --- a/changelogs/fragments/2105-zos_unarchive-encoding-support.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_unarchive - Added encoding support for the unarchive module. - This allows users to encode the files after unarchiving them in a perticular encoding. - (https://github.com/ansible-collections/ibm_zos_core/pull/2105) \ No newline at end of file diff --git a/changelogs/fragments/2111-update-zos_copy-block-size-docs.yml b/changelogs/fragments/2111-update-zos_copy-block-size-docs.yml deleted file mode 100644 index 19c62e985a..0000000000 --- a/changelogs/fragments/2111-update-zos_copy-block-size-docs.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_copy - Updated module documentation to reflect that binary destination data sets - get allocated with a block size of 32720, instead of 32760. - (https://github.com/ansible-collections/ibm_zos_core/pull/2111) diff --git a/changelogs/fragments/2116-zos_archive-skip_encoding.yml b/changelogs/fragments/2116-zos_archive-skip_encoding.yml deleted file mode 100644 index 92b2b6b7cc..0000000000 --- a/changelogs/fragments/2116-zos_archive-skip_encoding.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_archive - Adds support for skipping encoding in archive module. - This allows users to skip encoding for certain files before archiving them. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/2116) \ No newline at end of file diff --git a/changelogs/fragments/2120-zos_lineinfile-Added-return-content.yml b/changelogs/fragments/2120-zos_lineinfile-Added-return-content.yml deleted file mode 100644 index 9cda1bebbe..0000000000 --- a/changelogs/fragments/2120-zos_lineinfile-Added-return-content.yml +++ /dev/null @@ -1,4 +0,0 @@ -bugfixes: - - zos_lineinfile - Return values ``return_content`` and ``backup_name`` were not always being returned. - Fix now ensure that these values are always present in the module's response. - (https://github.com/ansible-collections/ibm_zos_core/pull/2120) \ No newline at end of file diff --git a/changelogs/fragments/2135-Test_case_to_check_tmphlq_zos_backup_restore.yml b/changelogs/fragments/2135-Test_case_to_check_tmphlq_zos_backup_restore.yml deleted file mode 100644 index 90afa97b9f..0000000000 --- a/changelogs/fragments/2135-Test_case_to_check_tmphlq_zos_backup_restore.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test_zos_backup_restore - Added a test case to ensure tmphlq is tested in the module. - (https://github.com/ansible-collections/ibm_zos_core/pull/2135). diff --git a/changelogs/fragments/2136-zos_unarchive_skip_encoding_support.yml b/changelogs/fragments/2136-zos_unarchive_skip_encoding_support.yml deleted file mode 100644 index 44e57a29cb..0000000000 --- a/changelogs/fragments/2136-zos_unarchive_skip_encoding_support.yml +++ /dev/null @@ -1,4 +0,0 @@ -trivial: - - zos_unarchive - Adds support for skipping encoding in zos_unarchive module. - This allows users to skip encoding for certain files after unarchiving them. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/2136) \ No newline at end of file diff --git a/changelogs/fragments/2137-zos_stat-new-fields.yml b/changelogs/fragments/2137-zos_stat-new-fields.yml deleted file mode 100644 index 68240e63e6..0000000000 --- a/changelogs/fragments/2137-zos_stat-new-fields.yml +++ /dev/null @@ -1,8 +0,0 @@ -minor_changes: - - zos_stat - Adds new fields that describe the type of the resource - that was queried. These new fields are `isfile`, `isdataset`, - `isaggregate` and `isgdg`. - (https://github.com/ansible-collections/ibm_zos_core/pull/2137) - - zos_stat - Module now returns whether the resource queried exists - on the managed node with the `exists` field inside `stat`. - (https://github.com/ansible-collections/ibm_zos_core/pull/2137) diff --git a/changelogs/fragments/2138-Allow_run_dependency_finder_with_other_command.yml b/changelogs/fragments/2138-Allow_run_dependency_finder_with_other_command.yml deleted file mode 100644 index aa13448515..0000000000 --- a/changelogs/fragments/2138-Allow_run_dependency_finder_with_other_command.yml +++ /dev/null @@ -1,3 +0,0 @@ -trivial: - - test/dependencyfinder.py - Updated use of git command to run a base command of git library. - (https://github.com/ansible-collections/ibm_zos_core/pull/2138) \ No newline at end of file diff --git a/changelogs/fragments/2153-zos_copy-supporting-pound-in-dataset-name-and content.yml b/changelogs/fragments/2153-zos_copy-supporting-pound-in-dataset-name-and content.yml deleted file mode 100644 index 6a458a3541..0000000000 --- a/changelogs/fragments/2153-zos_copy-supporting-pound-in-dataset-name-and content.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_copy - Added support for british pound character usage in file content and data set names for both source and destination when copying. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/2153) diff --git a/changelogs/fragments/2192-zos_archive-revert_src_encoding.yml b/changelogs/fragments/2192-zos_archive-revert_src_encoding.yml deleted file mode 100644 index 92ce12c38b..0000000000 --- a/changelogs/fragments/2192-zos_archive-revert_src_encoding.yml +++ /dev/null @@ -1,4 +0,0 @@ -minor_changes: - - zos_archive - Adds support for reverting the encoding of a source's - files after archiving them. - (https://github.com/ansible-collections/ibm_zos_core/pull/2192) \ No newline at end of file diff --git a/changelogs/fragments/2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml b/changelogs/fragments/2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml deleted file mode 100644 index ebf3a5bbf9..0000000000 --- a/changelogs/fragments/2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml +++ /dev/null @@ -1,7 +0,0 @@ -minor_changes: - - zos_mvs_raw - Adds support for volume data definition. - (https://github.com/ansible-collections/ibm_zos_core/pull/2194) - -trivial: - - test_zos_mvs_raw_func - added test cases to verify volume definition functionality in zos_mvs_raw module. - (https://github.com/ansible-collections/ibm_zos_core/pull/2194). diff --git a/docs/source/life-cycle.rst b/docs/source/life-cycle.rst index 59f981fd6b..23f04a89bc 100644 --- a/docs/source/life-cycle.rst +++ b/docs/source/life-cycle.rst @@ -27,6 +27,8 @@ its critical dates, and which type of support it's currently eligible for. 
+------------+----------------+-----------------------+------------------+-------------------+-------------------------+ | Version | Status | Changelogs | GA Date | EOL Date | Life Cycle Phase | +============+================+=======================+==================+===================+=========================+ +| 1.15.x | In preview | `1.15.x changelogs`_ | TBD | TBD | Beta phase | ++------------+----------------+-----------------------+------------------+-------------------+-------------------------+ | 1.14.x | Released | `1.14.x changelogs`_ | 30 June 2025 | 30 June 2027 | `Full support`_ | +------------+----------------+-----------------------+------------------+-------------------+-------------------------+ | 1.13.x | Released | `1.13.x changelogs`_ | 31 March 2025 | 31 March 2027 | `Full support`_ | @@ -41,6 +43,8 @@ its critical dates, and which type of support it's currently eligible for. .. ............................................................................. .. Global Links .. ............................................................................. +.. _1.15.x changelogs: + https://github.com/ansible-collections/ibm_zos_core/blob/v1.15.0-beta.1/CHANGELOG.rst .. _1.14.x changelogs: https://github.com/ansible-collections/ibm_zos_core/blob/v1.14.0/CHANGELOG.rst .. _1.13.x changelogs: diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 1fd812edfd..2a51654019 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -339,6 +339,46 @@ force | **default**: False +encoding + Specifies the character encoding conversion to be applied to the source files before archiving. + + Supported character sets rely on the charset conversion utility ``iconv`` version the most common character sets are supported. + + After conversion the files are stored in same location and name as src and the same src is taken in consideration for archive. 
+ + Source files will be converted to the new encoding and will not be restored to their original encoding. + + If encoding fails for any file in a set of multiple files, an exception will be raised and archiving will be skipped. + + The original files in ``src`` will be converted. The module will revert the encoding conversion after a successful archive, but no backup will be created. If you need to encode using a backup and then archive take a look at `zos_encode <./zos_encode.html>`_ module. + + | **required**: False + | **type**: dict + + + from + The character set of the source *src*. + + | **required**: False + | **type**: str + + + to + The destination *dest* character set for the files to be written as. + + | **required**: False + | **type**: str + + + skip_encoding + List of names to skip encoding before archiving. This is only used if *encoding* is set, otherwise is ignored. + + | **required**: False + | **type**: list + | **elements**: str + + + Attributes @@ -428,6 +468,32 @@ Examples format_options: use_adrdssu: true + - name: Encode the source data set into Latin-1 before archiving into a terse data set + zos_archive: + src: "USER.ARCHIVE.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + encoding: + from: IBM-1047 + to: ISO8859-1 + + - name: Encode and archive multiple data sets but skip encoding for a few. + zos_archive: + src: + - "USER.ARCHIVE1.TEST" + - "USER.ARCHIVE2.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + format_options: + use_adrdssu: true + encoding: + from: IBM-1047 + to: ISO8859-1 + skip_encoding: + - "USER.ARCHIVE2.TEST" + @@ -514,3 +580,21 @@ expanded_exclude_sources | **returned**: always | **type**: list +encoded + List of files or data sets that were successfully encoded. + + | **returned**: success + | **type**: list + +failed_on_encoding + List of files or data sets that were failed while encoding. 
+ + | **returned**: success + | **type**: list + +skipped_encoding_targets + List of files or data sets that were skipped while encoding. + + | **returned**: success + | **type**: list + diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index e1671b137e..26fc889d37 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -379,3 +379,35 @@ Notes +Return Values +------------- + + +changed + Indicates if the operation made changes. + + ``true`` when backup/restore was successful, ``false`` otherwise. + + | **returned**: always + | **type**: bool + | **sample**: + + .. code-block:: json + + true + +backup_name + The USS file name or data set name that was used as a backup. + + Matches the *backup_name* parameter provided as input. + + | **returned**: always + | **type**: str + | **sample**: /u/oeusr03/my_backup.dzp + +message + Returns any important messages about the modules execution, if any. + + | **returned**: always + | **type**: str + diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 99a8688b5b..5fe5e565f5 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -44,6 +44,16 @@ asa_text | **default**: False +identical_gdg_copy + If set to ``true``, and the destination GDG does not exist, the module will copy the source GDG to the destination GDG with identical GDS absolute names. + + If set to ``false``, the copy will be done as a normal copy, without preserving the source GDG absolute names. + + | **required**: False + | **type**: bool + | **default**: False + + backup Specifies whether a backup of the destination should be created before copying data. @@ -89,6 +99,8 @@ dest ``dest`` can be a USS file, directory or MVS data set name. + ``dest`` can be a alias name of a PS, PDS or PDSE data set. + If ``dest`` has missing parent directories, they will be created. 
If ``dest`` is a nonexistent USS file, it will be created. @@ -294,6 +306,8 @@ remote_src src Path to a file/directory or name of a data set to copy to remote z/OS system. + ``src`` can be a alias name of a PS, PDS or PDSE data set. + If ``remote_src`` is true, then ``src`` must be the path to a Unix System Services (USS) file, name of a data set, or data set member. If ``src`` is a local path or a USS path, it can be absolute or relative. diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index e7059e4d84..1f8b6e9c25 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -89,6 +89,12 @@ state If *state=uncataloged* and the data set is found, the data set is uncataloged, module completes successfully with *changed=True*. + If *state=present*, the data set is already cataloged and *volumes* is provided, the module will compare the volumes where it is cataloged against the provided *volumes*. If they don't match, the module will fail with an error indicating the data set is cataloged on a different volume. To resolve this, you must first uncatalog the data set before creating it on the new volume. + + + If *state=present*, the data set is already cataloged, *volumes* is provided, and the volumes match exactly, no action is taken and the module completes successfully with *changed=False*. + + | **required**: False | **type**: str | **default**: present diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 15b7db0ed3..8a341dfcdc 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -67,11 +67,11 @@ validate_checksum flat - Override the default behavior of appending hostname/path/to/file to the destination. If set to "true", the file or data set will be fetched to the destination directory without appending remote hostname to the destination. 
+ If set to "true", override the default behavior of appending hostname/path/to/file to the destination, instead the file or data set will be fetched to the destination directory without appending remote hostname to the destination. | **required**: False | **type**: bool - | **default**: true + | **default**: false is_binary diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index 79a0b6be73..1c3d5222c1 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -115,7 +115,7 @@ pds_patterns resource_type - The type of resource to search. + The types of resources to search. ``nonvsam`` refers to one of SEQ, LIBRARY (PDSE), PDS, LARGE, BASIC, EXTREQ, or EXTPREF. @@ -123,10 +123,25 @@ resource_type ``gdg`` refers to Generation Data Groups. The module searches based on the GDG base name. + ``migrated`` refers to listing migrated datasets. Only ``excludes`` and ``migrated_type`` options can be used along with this option. The module only searches based on dataset patterns. + | **required**: False - | **type**: str + | **type**: list + | **elements**: str | **default**: nonvsam - | **choices**: nonvsam, cluster, data, index, gdg + | **choices**: nonvsam, cluster, data, index, gdg, migrated + + +migrated_type + A migrated data set related attribute, only valid when ``resource_type=migrated``. + + If provided, will search for only those types of migrated datasets. 
+ + | **required**: False + | **type**: list + | **elements**: str + | **default**: ['cluster', 'data', 'index', 'nonvsam'] + | **choices**: nonvsam, cluster, data, index volume @@ -231,7 +246,7 @@ Examples zos_find: patterns: 'IMS.LIB.*' contains: 'hello' - excludes: '*.TEST' + excludes: '.*TEST' - name: Find all members starting with characters 'TE' in a given list of PDS patterns zos_find: @@ -253,17 +268,28 @@ Examples zos_find: patterns: - USER.* - resource_type: cluster + resource_type: + - 'cluster' - name: Find all Generation Data Groups starting with the word 'USER' and specific GDG attributes. zos_find: patterns: - USER.* - resource_type: gdg + resource_type: + - 'gdg' limit: 30 scratch: true purge: true + - name: Find all migrated and nonvsam data sets starting with the word 'USER' + zos_find: + patterns: + - USER.* + resource_type: + - 'migrated' + migrated_type: + - 'nonvsam' + @@ -281,6 +307,8 @@ Notes When searching for content within data sets, only non-binary content is considered. + As a migrated data set's information can't be retrieved without recalling it first, other options besides ``excludes`` and ``migrated_type`` are not supported. 
+ See Also diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index b1c2f22dc2..c58610d44e 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -124,6 +124,7 @@ jobs { "class": "R", "content_type": "JOB", + "cpu_time": 1414, "ddnames": [ { "byte_count": "775", @@ -232,10 +233,12 @@ jobs } ], "duration": 0, + "execution_node": "STL1", "execution_time": "00:00:03", "job_class": "R", "job_id": "JOB00134", "job_name": "HELLO", + "origin_node": "STL1", "owner": "OMVSADM", "priority": "1", "program_name": "IEBGENER", @@ -252,7 +255,8 @@ jobs } ] }, - "subsystem": "STL1" + "subsystem": "STL1", + "system": "STL1" } ] @@ -268,12 +272,36 @@ jobs | **type**: str | **sample**: HELLO + system + The job entry system that MVS uses to do work. + + | **type**: str + | **sample**: STL1 + subsystem The job entry subsystem that MVS uses to do work. | **type**: str | **sample**: STL1 + cpu_time + Sum of the CPU time used by each job step, in microseconds. + + | **type**: int + | **sample**: 5 + + execution_node + Execution node that picked the job and executed it. + + | **type**: str + | **sample**: STL1 + + origin_node + Origin node that submitted the job. + + | **type**: str + | **sample**: STL1 + class Identifies the data set used in a system output data set, usually called a sysout data set. diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index c9d0275b4b..38cea61e34 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -153,27 +153,35 @@ jobs { "asid": 0, "content_type": "JOB", + "cpu_time": 1414, "creation_date": "2023-05-03", "creation_time": "12:13:00", + "execution_node": "STL1", "execution_time": "00:00:02", "job_class": "K", "job_id": "JOB01427", "job_name": "LINKJOB", + "origin_node": "STL1", "owner": "ADMIN", "priority": 1, "queue_position": 3, "ret_code": "null", - "svc_class": "?" 
+ "subsystem": "STL1", + "svc_class": "?", + "system": "STL1" }, { "asid": 4, "content_type": "JOB", + "cpu_time": 1414, "creation_date": "2023-05-03", "creation_time": "12:14:00", + "execution_node": "STL1", "execution_time": "00:00:03", "job_class": "A", "job_id": "JOB16577", "job_name": "LINKCBL", + "origin_node": "STL1", "owner": "ADMIN", "priority": 0, "queue_position": 0, @@ -181,7 +189,9 @@ jobs "code": "null", "msg": "CANCELED" }, - "svc_class": "E" + "subsystem": "STL1", + "svc_class": "E", + "system": "STL1" } ] @@ -233,6 +243,24 @@ jobs | **type**: str | **sample**: STL1 + cpu_time + Sum of the CPU time used by each job step, in microseconds. + + | **type**: int + | **sample**: 5 + + execution_node + Execution node that picked the job and executed it. + + | **type**: str + | **sample**: STL1 + + origin_node + Origin node that submitted the job. + + | **type**: str + | **sample**: STL1 + ret_code Return code output collected from job log. diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 845478efff..0468e85df1 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -253,7 +253,6 @@ template_parameters | **default**: \\n | **choices**: \\n, \\r, \\r\\n - auto_reload Whether to reload a template file when it has changed after the task has started. @@ -383,6 +382,7 @@ jobs "asid": 0, "class": "K", "content_type": "JOB", + "cpu_time": 1, "creation_date": "2023-05-03", "creation_time": "12:13:00", "ddnames": [ @@ -579,10 +579,12 @@ jobs "stepname": "DLORD6" } ], + "execution_node": "STL1", "execution_time": "00:00:10", "job_class": "K", "job_id": "JOB00361", "job_name": "DBDGEN00", + "origin_node": "STL1", "owner": "OMVSADM", "priority": 1, "program_name": "IEBGENER", @@ -858,4 +860,34 @@ jobs | **type**: str | **sample**: IEBGENER + system + The job entry system that MVS uses to do work. 
+ + | **type**: str + | **sample**: STL1 + + subsystem + The job entry subsystem that MVS uses to do work. + + | **type**: str + | **sample**: STL1 + + cpu_time + Sum of the CPU time used by each job step, in microseconds. + + | **type**: int + | **sample**: 5 + + execution_node + Execution node that picked the job and executed it. + + | **type**: str + | **sample**: STL1 + + origin_node + Origin node that submitted the job. + + | **type**: str + | **sample**: STL1 + diff --git a/docs/source/modules/zos_mvs_raw.rst b/docs/source/modules/zos_mvs_raw.rst index e01dd0cc87..71ae7f8b01 100644 --- a/docs/source/modules/zos_mvs_raw.rst +++ b/docs/source/modules/zos_mvs_raw.rst @@ -682,6 +682,14 @@ dds | **type**: raw + reserved_cols + Determines how many columns at the beginning of the content are reserved with empty spaces. + + | **required**: False + | **type**: int + | **default**: 2 + + return_content Determines how content should be returned to the user. @@ -818,6 +826,45 @@ dds + dd_volume + Use *dd_volume* to specify the volume to use in the DD statement. + + | **required**: False + | **type**: dict + + + dd_name + The DD name. + + | **required**: True + | **type**: str + + + volume_name + The volume serial number. + + | **required**: True + | **type**: str + + + unit + Device type for the volume. + + This option is case sensitive. + + | **required**: True + | **type**: str + + + disposition + *disposition* indicates the status of a data set. + + | **required**: True + | **type**: str + | **choices**: new, shr, mod, old + + + dd_concat *dd_concat* is used to specify a data set concatenation. @@ -1407,6 +1454,14 @@ dds | **type**: raw + reserved_cols + Determines how many columns at the beginning of the content are reserved with empty spaces. + + | **required**: False + | **type**: int + | **default**: 2 + + return_content Determines how content should be returned to the user. 
@@ -1508,6 +1563,37 @@ Examples
              dd_name: sysin
              content: " LISTCAT ENTRIES('SOME.DATASET.*')"
 
+   - name: Full volume dump using ADRDSSU.
+     zos_mvs_raw:
+       program_name: adrdssu
+       auth: true
+       dds:
+         - dd_data_set:
+             dd_name: dumpdd
+             data_set_name: mypgm.output.ds
+             disposition: new
+             disposition_normal: catalog
+             disposition_abnormal: delete
+             space_type: cyl
+             space_primary: 10
+             space_secondary: 10
+             record_format: u
+             record_length: 0
+             block_size: 32760
+             type: seq
+         - dd_volume:
+             dd_name: voldd
+             volume_name: "000000"
+             unit: "3390"
+             disposition: old
+         - dd_input:
+             dd_name: sysin
+             content: " VOLDUMP VOL(voldd) DSNAME(dumpdd) FULL"
+         - dd_output:
+             dd_name: sysprint
+             return_content:
+               type: text
+
   - name: List data sets matching patterns in catalog, save output to a new sequential data set and return output as text.
     zos_mvs_raw:
@@ -1778,6 +1864,28 @@ Examples
            VOLUMES(222222) -
            UNIQUE)
 
+   - name: Simple FTP connection using first and second columns.
+     zos_mvs_raw:
+       program_name: AMAPDUPL
+       auth: true
+       dds:
+         - dd_output:
+             dd_name: sysprint
+             return_content:
+               type: text
+         - dd_data_set:
+             dd_name: SYSUT1
+             data_set_name: myhlq.ds1.output
+             disposition: shr
+         - dd_input:
+             dd_name: sysin
+             reserved_cols: 0
+             content: |
+               USERID=anonymous
+               PASSWORD=anonymous
+               TARGET_SYS=testcase.boulder.ibm.com
+               TARGET_DSN=wessamp.bigfile
+
   - name: List data sets matching pattern in catalog, save output to a new generation of gdgs.
     zos_mvs_raw:
diff --git a/docs/source/modules/zos_replace.rst b/docs/source/modules/zos_replace.rst
index 44c2dbb121..3a0dfcce34 100644
--- a/docs/source/modules/zos_replace.rst
+++ b/docs/source/modules/zos_replace.rst
@@ -40,7 +40,7 @@ backup
 
   When set to ``true``, the module creates a backup file or data set.
 
-  The backup file name will be returned on either success or failure of module execution such that data can be retrieved.
+ The backup file name will be returned if *backup* is ``true`` on either success or failure of module execution such that data can be retrieved. | **required**: False | **type**: bool @@ -52,13 +52,17 @@ backup_name If *src* is a USS file or path, backup_name must be a file or path name, and it must be an absolute path name. - If the source is an MVS data set, *backup_name* must be an MVS data set name, and the data set must not be preallocated. + If the source is an MVS data set, *backup_name* must be an MVS data set name, and the data set must **not** be preallocated. + + If it is a Generation Data Set (GDS), use a relative positive name, e.g., *SOME.CREATION(+1*). If *backup_name* is not provided, a default name will be used. If the source is a USS file or path, the name of the backup file will be the source file or path name appended with a timestamp, e.g. ``/path/file_name.2020-04-23-08-32-29-bak.tar``. + If *src* is a seq data set and backup_name is not provided, the data set will be backed up to seq data set with a randomly generated name. + If *src* is a data set member and backup_name is not provided, the data set member will be backed up to the same partitioned data set with a randomly generated member name. - If it is a Generation Data Set (GDS), use a relative positive name, e.g., *SOME.CREATION(+1*). + If *src* is a Generation Data Set (GDS) and backup_name is not provided, backup will be a sequential data set. | **required**: False | **type**: str @@ -74,7 +78,7 @@ before encoding - The character set of the source *target*. `zos_replace <./zos_replace.html>`_ requires it to be provided with correct encoding to read the content of a USS file or data set. If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. + The character set for data in the *target*. Module `zos_replace <./zos_replace.html>`_ requires the encoding to correctly read the content of a USS file or data set. 
If this parameter is not provided, this module assumes that USS file or data set is encoded in IBM-1047. Supported character sets rely on the charset conversion utility (iconv) version; the most common character sets are supported. @@ -83,7 +87,7 @@ encoding | **default**: IBM-1047 -disable_regex +literal A list or string that allows the user to specify choices "before", "after", or "regexp" as regular strings instead of regex patterns. | **required**: False @@ -91,11 +95,11 @@ disable_regex target - The location can be a UNIX System Services (USS) file, PS (sequential data set), member of a PDS or PDSE, PDS, PDSE. + The location can be a UNIX System Services (USS) file, PS (sequential data set), PDS, PDSE, member of a PDS or PDSE. The USS file must be an absolute pathname. - It is possible to use a generation data set (GDS) relative name of generation already. created. e.g. *SOME.CREATION(-1*). + It is possible to use a generation data set (GDS) relative name of generation already created. e.g. *SOME.CREATION(-1*). 
| **required**: True | **type**: str @@ -151,25 +155,25 @@ Examples zos_replace: target: SAMPLE.SOURCE regexp: //*LIB DD UNIT=SYS,SPACE=(TRK,(1,1)),VOL=SER=vvvvvv - replace: //*LIB DD UNIT=SYS,SPACE=(CYL,(1,1)) + replace: //*LIB DD UNIT=SYS,SPACE=(CYL,(1,1)) after: '^\$source base \([^\s]+\)' - disable_regex: regexp + literal: regexp - name: Replace a specific line before a specific sentence with backup zos_replace: target: SAMPLE.SOURCE - backup: True + backup: true regexp: //SYSPRINT DD SYSOUT=* before: SAMPLES OUTPUT SYSIN *=$DSN - disable_regex: - - regexp - - before + literal: + - regexp + - before - name: Replace some words between two lines with a backup with tmp_hlq zos_replace: target: SAMPLE.DATASET tmp_hlq: ANSIBLE - backup: True + backup: true backup_name: BACKUP.DATASET regexp: var replace: vars @@ -182,7 +186,7 @@ Examples regexp: ^(IEE132I|IEA989I|IEA888I|IEF196I|IEA000I)\s.* after: ^IEE133I PENDING * before: ^IEE252I DEVICE * - backup: True + backup: true backup_name: "SOURCE.GDG(+1)" - name: Delete some calls to SYSTEM on a member using a backref @@ -196,6 +200,12 @@ Examples +Notes +----- + +.. note:: + For supported character sets used to encode data, refer to the `documentation `_. + @@ -232,7 +242,7 @@ found | **sample**: 5 msg - Error messages from the module + A string with a generic or error message relayed to the user. | **returned**: failure | **type**: str diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index 3d5b74e13f..e85fdb14f0 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -223,7 +223,6 @@ template_parameters | **default**: \\n | **choices**: \\n, \\r, \\r\\n - auto_reload Whether to reload a template file when it has changed after the task has started. 
diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst
index c252feb3b1..eec87c3eca 100644
--- a/docs/source/modules/zos_unarchive.rst
+++ b/docs/source/modules/zos_unarchive.rst
@@ -53,7 +53,7 @@ format
 
   name
-    The compression format to use.
+    The compression format used while archiving.
 
     | **required**: True
     | **type**: str
@@ -81,7 +81,7 @@ format
 
 
   use_adrdssu
-    If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``.
+    If set to true, the ``zos_unarchive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``.
 
     | **required**: False
     | **type**: bool
@@ -357,6 +357,46 @@ remote_src
   | **default**: False
 
 
+encoding
+  Specifies the character encoding conversion to be applied to the destination files after unarchiving.
+
+  Supported character sets rely on the charset conversion utility ``iconv`` version; the most common character sets are supported.
+
+  After conversion, the files are stored in the same location they were unarchived to, under the same original names. No backup of the original unconverted files is kept, since unarchive can be executed again without encoding parameters on the same source archive files.
+
+  Destination files will be converted to the new encoding and will not be restored to their original encoding.
+
+  If encoding fails for any file in a set of multiple files, the name of the skipped file will be provided and the task will complete successfully with rc code 0.
+
+  Encoding does not check if the file is already present or not. It works on the file/files successfully unarchived.
+
+  | **required**: False
+  | **type**: dict
+
+
+  from
+    The character set of the source *src*.
+ + | **required**: False + | **type**: str + + + to + The destination *dest* character set for the files to be written as. + + | **required**: False + | **type**: str + + + skip_encoding + List of names to skip encoding after unarchiving. This is only used if *encoding* is set, otherwise is ignored. + + | **required**: False + | **type**: list + | **elements**: str + + + Attributes @@ -422,6 +462,27 @@ Examples use_adrdssu: true list: true + # Encoding example + - name: Encode the destination data set into Latin-1 after unarchiving. + zos_unarchive: + src: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + encoding: + from: IBM-1047 + to: ISO8859-1 + + - name: Encode the destination data set into Latin-1 after unarchiving. + zos_unarchive: + src: "USER.ARCHIVE.RESULT.TRS" + format: + name: terse + encoding: + from: IBM-1047 + to: ISO8859-1 + skip_encoding: + - USER.ARCHIVE.TEST1 + @@ -474,3 +535,21 @@ missing | **returned**: success | **type**: str +encoded + List of files or data sets that were successfully encoded. + + | **returned**: success + | **type**: list + +failed_on_encoding + List of files or data sets that were failed while encoding. + + | **returned**: success + | **type**: list + +skipped_encoding_targets + List of files or data sets that were skipped while encoding. + + | **returned**: success + | **type**: list + diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst index 8f60671c38..82e5c559ea 100644 --- a/docs/source/release_notes.rst +++ b/docs/source/release_notes.rst @@ -6,16 +6,86 @@ Releases ======== +Version 1.15.0-beta.1 +===================== + +Minor Changes +------------- + +- ``zos_archive`` + + - Adds support for encoding before archiving files. + - Adds support for skipping encoding in archive module. This allows users to skip encoding for certain files before archiving them. + - Adds support for reverting the encoding of a source's files after archiving them. 
+
+- ``zos_copy``
+
+  - Adds new option ``identical_gdg_copy`` in the module. This allows copying GDG generations from a source base to a destination base while preserving generation data set absolute names when the destination base does not exist prior to the copy.
+  - Adds support for using alias names in src and dest parameters for PS, PDS and PDSE data sets.
+  - Added support for British pound character usage in file content and data set names for both source and destination when copying.
+
+- ``zos_fetch`` - Updated the documentation to correctly state what the default behavior of the module is.
+- ``zos_find``
+
+  - Adds functionality to find migrated data sets.
+  - Adds functionality to find different types of data sets at the same time.
+
+- ``zos_job_output`` - Adds new fields cpu_time, origin_node and execution_node to response.
+- ``zos_job_query`` - Adds new fields cpu_time, origin_node and execution_node to response.
+- ``zos_job_submit`` - Adds new fields cpu_time, origin_node and execution_node to response.
+
+- ``zos_mvs_raw``
+
+  - Before this addition, nothing could be placed in columns 1 or 2 because they were reserved for JCL processing. This change adds the ``reserved_cols`` option and validates whether the module may modify the ``dd_content`` option based on its value; otherwise, the previous behavior is retained.
+  - Adds support for volume data definition.
+
+- ``zos_stat``
+
+  - Added support to recall migrated data sets and return their attributes.
+  - Adds new fields that describe the type of the resource that was queried. These new fields are ``isfile``, ``isdataset``, ``isaggregate`` and ``isgdg``.
+  - Adds support to query data sets using their aliases.
+  - Module now returns whether the resource queried exists on the managed node with the ``exists`` field inside ``stat``.
+
+- ``zos_unarchive`` - Added encoding support in the zos_unarchive module. This allows users to encode the files after unarchiving them.
+ +Bugfixes +-------- +- ``zos_backup_restore`` - Return value ``backup_name`` was empty upon successful result. Fix now returns ``backup_name`` populated. +- ``zos_data_set`` - Attempting to create a data set with the same name on a different volume did not work, nor did it report a failure. The fix now informs the user that if the data set is cataloged on a different volume, it needs to be uncataloged before using the data set module to create a new data set on a different volume. +- ``zos_fetch`` - Previously, the use of `become` would result in a permissions error while trying to fetch a data set or a member. Fix now allows a user to escalate privileges when fetching resources. +- ``zos_lineinfile`` + + - Return values ``return_content`` and ``backup_name`` were not always being returned. Fix now ensure that these values are always present in the module's response. + - The module would report a false negative when certain special characters where present in the `line` option. Fix now reports the successful operation. + +- ``zos_mount`` - FSUMF168 return in stderror means that the mount dataset wouldn't resolve. While this shows a catalog or volume issue, it should not impact our search for an existing mount. Added handling to the df call, so that FSUMF168 are ignored. + + +New Modules +----------- + +- ibm.ibm_zos_core.zos_replace - Replace all instances of a pattern within a file or data set. + +Availability +------------ +* `Galaxy`_ +* `GitHub`_ + +Known Issues +------------ +- ``zos_copy`` - Copying from a sequential data set that is in use will result in a false positive and destination data set will be empty. The same is true when ``type=gdg`` and source GDS is a sequential data set in use. + + Version 1.14.1 ============== Bugfixes -------- -- zos_copy - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. 
Fix now sets the correct permissions for the Ansible user for copying to the remote. -- zos_job_submit - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. -- zos_script - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. -- zos_unarchive - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. +- ``zos_copy`` - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. +- ``zos_job_submit`` - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. +- ``zos_script`` - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. +- ``zos_unarchive`` - Previously, if the Ansible user was not a superuser copying a file into the managed node resulted in a permission denied error. Fix now sets the correct permissions for the Ansible user for copying to the remote. 
Availability ------------ diff --git a/galaxy.yml b/galaxy.yml index ff1c32d67a..3780418a2c 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: "1.14.1" +version: "1.15.0-beta.1" # Collection README file readme: README.md diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 733d007be0..ebfe479716 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,5 +1,5 @@ name: ibm_zos_core -version: "1.14.1" +version: "1.15.0-beta.1" managed_requirements: - name: "IBM Open Enterprise SDK for Python" From 26453eb84c1206358f06951ffea5084230feccfd Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Thu, 7 Aug 2025 10:12:50 -0600 Subject: [PATCH 06/73] [Enhancement][zos_encode] Interface update to zos_encode (#2228) * Updated zos_encode * updated test cases * Updated encoding tests * Added encoding dictionary to the docs * Updated changelogs * Fixed docs * Update zos_encode.py --- .../2228-zos_encode-interface-update.yml | 3 + plugins/modules/zos_encode.py | 29 ++++-- .../modules/test_zos_encode_func.py | 90 ++++++++++++++++++- 3 files changed, 111 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/2228-zos_encode-interface-update.yml diff --git a/changelogs/fragments/2228-zos_encode-interface-update.yml b/changelogs/fragments/2228-zos_encode-interface-update.yml new file mode 100644 index 0000000000..6988482e1d --- /dev/null +++ b/changelogs/fragments/2228-zos_encode-interface-update.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_encode - Adds new return value ``encoding`` with ``from`` and ``to`` encoding values used in the operation. + (https://github.com/ansible-collections/ibm_zos_core/pull/2228). 
\ No newline at end of file diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index ffd0f7ab62..d174bdbd0d 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -304,6 +304,25 @@ returned: changed and if backup=yes type: str sample: /path/file_name.2020-04-23-08-32-29-bak.tar +encoding: + description: + - Specifies which encodings the destination file or data set was + converted from and to. + type: dict + returned: always + contains: + from: + description: + - The character set of the source I(src). + type: str + sample: IBM-1047 + returned: always + to: + description: + - The destination I(dest) character set for the output that was written as. + type: str + sample: ISO8859-1 + returned: always """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -536,10 +555,8 @@ def run_module(): dest_data_set = None convert_rc = False changed = False - - result = dict(changed=changed, src=src, dest=dest) - if backup: - result["backup_name"] = None + encoding_dict = {"from": from_encoding, "to": to_encoding} + result = dict(changed=changed, src=src, dest=dest, encoding=encoding_dict, backup_name=None) try: # Check the src is a USS file/path or an MVS data set @@ -701,9 +718,7 @@ def run_module(): eu.uss_tag_encoding(new_dest, to_encoding) changed = True - result = dict(changed=changed, src=new_src, dest=new_dest, backup_name=backup_name) - else: - result = dict(src=new_src, dest=new_dest, changed=changed, backup_name=backup_name) + result.update(dict(src=new_src, dest=new_dest, changed=changed, backup_name=backup_name)) except encode.TaggingError as e: module.fail_json( msg=e.msg, diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 135e4dba86..c1d5805997 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -245,6 +245,10 @@ def 
test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is None assert result.get("changed") is True + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -274,6 +278,9 @@ def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): assert result.get("dest") == uss_dest_path assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}/{path.basename(uss_file)}") for result in tag_results.contacted.values(): @@ -306,6 +313,9 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): assert result.get("dest") == uss_dest_path assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}") for result in tag_results.contacted.values(): @@ -338,6 +348,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=uss_file, state="absent") hosts.all.zos_data_set(name=mvs_ps, 
state="absent") @@ -365,6 +378,9 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -397,6 +413,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=uss_file, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -435,6 +454,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=uss_file, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -471,6 +493,9 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -509,6 
+534,10 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING + hosts.all.file(path=uss_dest_path, state="directory") results = hosts.all.zos_encode( src=mvs_ps, @@ -524,6 +553,9 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): assert result.get("dest") == uss_dest_path assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}") for result in tag_results.contacted.values(): @@ -559,6 +591,9 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -575,10 +610,11 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) for result in results.contacted.values(): + print(result) assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert 
result.get("changed") is True @@ -595,6 +631,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): assert result.get("dest") == mvs_vs assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=temp_jcl_path, state="absent") hosts.all.file(path=uss_file, state="absent") @@ -623,6 +662,9 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -658,6 +700,10 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING + hosts.all.zos_data_set(name=mvs_vs, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -691,6 +737,10 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING hosts.all.zos_data_set(name=mvs_vs, state="absent") 
hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -707,7 +757,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -728,6 +778,10 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): assert result.get("dest") == mvs_vs assert result.get("backup_name") is None assert result.get("changed") is True + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=temp_jcl_path, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -755,6 +809,10 @@ def test_uss_encoding_conversion_src_with_special_chars(ansible_zos_module): assert result.get("backup_name") is None assert result.get("changed") is True assert result.get("msg") is None + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING finally: hosts.all.zos_data_set(name=src_data_set, state="absent") @@ -813,6 +871,10 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): ) for enc_res in encode_res.contacted.values(): assert enc_res.get("backup_name")[:6] == tmphlq + assert enc_res.get("encoding") is not None + assert isinstance(enc_res.get("encoding"), dict) + assert enc_res.get("encoding").get("to") == FROM_ENCODING + assert enc_res.get("encoding").get("from") == TO_ENCODING contents = hosts.all.shell(cmd="cat \"//'{0}(SAMPLE)'\"".format(enc_res.get("backup_name"))) 
hosts.all.file(path=temp_jcl_path, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -873,7 +935,7 @@ def test_vsam_backup(ansible_zos_module): cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) hosts.all.file(path=temp_jcl_path, state="absent") # submit JCL to populate KSDS @@ -882,7 +944,7 @@ def test_vsam_backup(ansible_zos_module): cmd=f"echo {quote(KSDS_REPRO_JCL.format(mvs_vs.upper()))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) hosts.all.zos_encode( @@ -958,6 +1020,10 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) backup_name = None for result in results.contacted.values(): backup_name = result.get("backup_name") + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING assert backup_name contents = hosts.all.shell(cmd=f"cat {backup_name}file1") content1 = "" @@ -1026,6 +1092,10 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( backup_name = None for result in results.contacted.values(): backup_name = result.get("backup_name") + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING results = hosts.all.shell(cmd=f"ls -la {backup_name[:-4]}*") for result in results.contacted.values(): @@ -1058,6 +1128,10 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): for content in enc_ds.contacted.values(): assert 
content.get("backup_name") is not None assert content.get("backup_name") == backup_data_set + assert content.get("encoding") is not None + assert isinstance(content.get("encoding"), dict) + assert content.get("encoding").get("to") == TO_ENCODING + assert content.get("encoding").get("from") == FROM_ENCODING hosts.all.zos_data_set(name=backup_data_set, state="absent") enc_ds = hosts.all.zos_encode( @@ -1074,6 +1148,10 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): assert content.get("msg") is not None assert content.get("backup_name") is not None assert content.get("backup_name") == backup_data_set + assert content.get("encoding") is not None + assert isinstance(content.get("encoding"), dict) + assert content.get("encoding").get("to") == TO_ENCODING + assert content.get("encoding").get("from") == INVALID_ENCODING finally: hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set(name=backup_data_set, state="absent") @@ -1101,6 +1179,10 @@ def test_gdg_encoding_conversion_src_with_invalid_generation(ansible_zos_module, assert "not cataloged" in result.get("msg") assert result.get("backup_name") is None assert result.get("changed") is False + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING finally: hosts.all.shell(cmd=f"""drm "{ds_name}(0)" """) hosts.all.shell(cmd=f"drm {ds_name}") From 19de4314d5772c70ca685f6fbaa35505948842f2 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 11 Aug 2025 13:33:50 +0530 Subject: [PATCH 07/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 681e0107ff..c6ed611d68 100644 --- 
a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -345,6 +345,26 @@ def run_module(): device = device_type if device_type is not None else device_number kwargs = {} + # Validations + if job_account != "" and len(job_account) > 55: + module.fail_json( + msg="job_account value should not exceed 55 characters.", + changed=False + ) + if device_number != "": + devnum_len = len(device_number) + if devnum_len not in (3, 5) or ( devnum_len == 5 and not device_number.startswith("/")): + module.fail_json( + msg="Invalid device_number.", + changed=False + ) + if subsystem_name != "" and len(job_account) > 4: + module.fail_json( + msg="The subsystem_name must be 1 - 4 characters.", + changed=False + ) + # keywaord arguments validation..... + wait_s = 5 use_wait_arg = False @@ -366,7 +386,7 @@ def run_module(): started_task_name = started_task_name + "." + identifier else: module.fail_json( - msg="either member_name or identifier is needed but both are missing.", + msg="one of job_name, member_name or identifier is needed but all are missing.", changed=False ) if operation == 'start': From 256d4c9af95d19952a1d954836641cd0a4309101 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 11 Aug 2025 18:04:24 +0530 Subject: [PATCH 08/73] Updating testcases --- plugins/module_utils/better_arg_parser.py | 4 +-- plugins/modules/zos_started_task.py | 22 ++++++++------- .../modules/test_zos_started_task_func.py | 27 +++++++++++++++++++ 3 files changed, 41 insertions(+), 12 deletions(-) diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index f61abd4652..ab0fde9292 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -359,7 +359,7 @@ def _member_name_type(self, contents, resolve_dependencies): IGNORECASE, ): raise ValueError( - 'Invalid argument "{0}" for type "data_set".'.format(contents) + 
'Invalid argument "{0}" for type "member_name".'.format(contents) ) return str(contents) @@ -391,7 +391,7 @@ def _identifier_name_type(self, contents, resolve_dependencies): IGNORECASE, ): raise ValueError( - 'Invalid argument "{0}" for type "data_set".'.format(contents) + 'Invalid argument "{0}" for type "identifier_name".'.format(contents) ) return str(contents) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index c6ed611d68..35a1536145 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -177,9 +177,9 @@ def prepare_start_command(member, identifier, job_name, job_account, device, vol cmd = 'S '+member if identifier: cmd = cmd + "." + identifier + "," + device + "," + volume_serial + "," + parameters - if jobname: + if job_name: cmd = cmd + ",jobname=" + job_name - if jobaccount: + if job_account: cmd = cmd + ",jobacct=" + job_account if subsystem_name: cmd = cmd + ",SUB=" + subsystem_name @@ -209,14 +209,15 @@ def run_module(): 'required': False, 'aliases': ['member'] }, - 'identifier': { + 'identifier_name': { 'arg_type': 'str', - 'required': False + 'required': False, + 'aliases': ['identifier'] }, 'job_name': { 'type': 'str', 'required': False, - 'aliases': ['task_name'] + 'aliases': ['job', 'task_name', 'task'] }, 'job_account': { #55 chars 'type': 'str', @@ -275,12 +276,13 @@ def run_module(): }, 'identifier_name': { 'arg_type': 'identifier_name', - 'required': False + 'required': False, + 'aliases': ['identifier'] }, 'job_name': { 'arg_type': 'str', 'required': False, - 'aliases': ['job'] + 'aliases': ['job', 'task_name', 'task'] }, 'job_account': { 'arg_type': 'str', @@ -346,19 +348,19 @@ def run_module(): kwargs = {} # Validations - if job_account != "" and len(job_account) > 55: + if job_account and len(job_account) > 55: module.fail_json( msg="job_account value should not exceed 55 characters.", changed=False ) - if device_number != "": + if device_number: devnum_len = 
len(device_number) if devnum_len not in (3, 5) or ( devnum_len == 5 and not device_number.startswith("/")): module.fail_json( msg="Invalid device_number.", changed=False ) - if subsystem_name != "" and len(job_account) > 4: + if subsystem_name and len(subsystem_name) > 4: module.fail_json( msg="The subsystem_name must be 1 - 4 characters.", changed=False ) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 04ae70c678..d1246d310a 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -30,6 +30,33 @@ SH sleep 600 /*""" +def test_start_task_with_invalid_member(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLETASK" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("stderr") is not None + +def test_start_task_with_invalid_identifier(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLE", + identifier="$HELLO" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("stderr") is not None + def test_start_and_cancel_zos_started_task(ansible_zos_module): try: hosts = ansible_zos_module From 8cd6c6913deabb32da8950910efce2c1ae81a64d Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 11 Aug 2025 19:05:48 +0530 Subject: [PATCH 09/73] Updating test cases --- .../modules/test_zos_started_task_func.py | 37 +++++++++++++++++-- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/tests/functional/modules/test_zos_started_task_func.py
b/tests/functional/modules/test_zos_started_task_func.py index d1246d310a..e1a8165b73 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -57,6 +57,35 @@ def test_start_task_with_invalid_identifier(ansible_zos_module): assert result.get("failed") is True assert result.get("stderr") is not None +def test_start_task_with_invalid_jobaccount(ansible_zos_module): + hosts = ansible_zos_module + job_account = "(T043JM,JM00,1,0,0,This is the invalid job account information to test negative scenario)" + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLE", + job_account=job_account + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +def test_start_task_with_invalid_devicenum(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + operation="start", + member="SAMPLE", + device_number="0870" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + def test_start_and_cancel_zos_started_task(ansible_zos_module): try: hosts = ansible_zos_module @@ -101,7 +130,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): stop_results = hosts.all.zos_started_task( operation="cancel", - started_task_name="SAMPLE" + task_name="SAMPLE" ) for result in stop_results.contacted.values(): @@ -123,7 +152,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): display_result = hosts.all.zos_started_task( operation="display", - started_task_name="SAMPLE" + task_name="SAMPLE" ) for result in display_result.contacted.values(): print(result) @@ -135,7 +164,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): stop_results = 
hosts.all.zos_started_task( operation="cancel", - started_task_name="SAMPLE", + task_name="SAMPLE", asid=asid_val ) @@ -200,7 +229,7 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): stop_results = hosts.all.zos_started_task( operation="cancel", - started_task_name="TESTTSK" + task_name="TESTTSK" ) for result in stop_results.contacted.values(): From 73190da8aad6e437de2ce5fc3422b34daeadea70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Mon, 11 Aug 2025 16:25:32 -0500 Subject: [PATCH 10/73] [Enabler][2142]update_zos_copy_interface (#2232) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Test * Alter names * Remove and add values to parameter validation * Replace on test suite * Replace fix * Add option to replace to work properly * Debug * Fix checksum * Fix pushed checksum * Fix copy * Fix changes on job utilities * Debug test * Debug test * Complete validation * Validate copy * Fix copy allocate * Fix testing validations * Fix str * Debug * Debug line * Debug * Return copy for zoau call * Try validate force work * Add fragment * Try validate force work * Fix sanity * Modify fragment * Update plugins/modules/zos_copy.py Co-authored-by: Fernando Flores --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores --- .../2232_Update_zos_copy_interface.yml | 6 + plugins/action/zos_copy.py | 16 +- plugins/module_utils/copy.py | 6 +- plugins/modules/zos_copy.py | 212 +++--- .../functional/modules/test_zos_copy_func.py | 618 +++++++++++++----- 5 files changed, 572 insertions(+), 286 deletions(-) create mode 100644 changelogs/fragments/2232_Update_zos_copy_interface.yml diff --git a/changelogs/fragments/2232_Update_zos_copy_interface.yml b/changelogs/fragments/2232_Update_zos_copy_interface.yml new file mode 100644 index 0000000000..2e850186e4 --- /dev/null +++ 
b/changelogs/fragments/2232_Update_zos_copy_interface.yml @@ -0,0 +1,6 @@ +breaking_changes: + - zos_copy - Option ``force_lock`` is deprecated in favor of ``force`` for using datasets on dsp=shr. + Option ``force`` is deprecated in favor of ``replace`` for cases you want to replace a dest already exists. + Option ``executable`` is deprecated in favor of ``is_executable``. + Now return value ``dest_created`` is always return with bool value. + (https://github.com/ansible-collections/ibm_zos_core/pull/2232). \ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 1c19b31de6..bd6335017f 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -51,13 +51,13 @@ def run(self, tmp=None, task_vars=None): dest = task_args.get('dest', None) content = task_args.get('content', None) - force = _process_boolean(task_args.get('force'), default=True) + replace = _process_boolean(task_args.get('replace'), default=True) backup = _process_boolean(task_args.get('backup'), default=False) local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) is_binary = _process_boolean(task_args.get('is_binary'), default=False) - force_lock = _process_boolean(task_args.get('force_lock'), default=False) - executable = _process_boolean(task_args.get('executable'), default=False) + force = _process_boolean(task_args.get('force'), default=False) + is_executable = _process_boolean(task_args.get('is_executable'), default=False) asa_text = _process_boolean(task_args.get('asa_text'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=True) backup_name = task_args.get("backup_name", None) @@ -116,8 +116,8 @@ def run(self, tmp=None, task_vars=None): msg = "Both 'is_binary' and 'asa_text' are True. Unable to copy binary data as an ASA text file." 
return self._exit_action(result, msg, failed=True) - if executable and asa_text: - msg = "Both 'executable' and 'asa_text' are True. Unable to copy an executable as an ASA text file." + if is_executable and asa_text: + msg = "Both 'is_executable' and 'asa_text' are True. Unable to copy an is_executable as an ASA text file." return self._exit_action(result, msg, failed=True) use_template = _process_boolean(task_args.get("use_template"), default=False) @@ -130,9 +130,9 @@ def run(self, tmp=None, task_vars=None): msg = "Cannot specify 'mode', 'owner' or 'group' for MVS destination" return self._exit_action(result, msg, failed=True) - if force_lock: + if force: display.warning( - msg="Using force_lock uses operations that are subject to race conditions and can lead to data loss, use with caution.") + msg="Using force uses operations that are subject to race conditions and can lead to data loss, use with caution.") template_dir = None if not remote_src: @@ -293,7 +293,7 @@ def run(self, tmp=None, task_vars=None): path = os.path.normpath(f"{self.tmp_dir}/ansible-zos-copy") rm_res = self._connection.exec_command(f"rm -rf {path}*") - if copy_res.get("note") and not force: + if copy_res.get("note") and not replace: result["note"] = copy_res.get("note") return result diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index f5fd194481..1e8691786c 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -209,7 +209,7 @@ def copy_vsam_ps(src, dest, tmphlq=None): return rc, out, err -def copy_asa_uss2mvs(src, dest, tmphlq=None, force_lock=False): +def copy_asa_uss2mvs(src, dest, tmphlq=None, force=False): """Copy a file from USS to an ASA sequential data set or PDS/E member. Parameters @@ -220,7 +220,7 @@ def copy_asa_uss2mvs(src, dest, tmphlq=None, force_lock=False): The MVS destination data set or member. tmphlq : str High Level Qualifier for temporary datasets. 
- force_lock : bool + force : bool Whether to open the destination in SHR mode. Returns @@ -236,7 +236,7 @@ def copy_asa_uss2mvs(src, dest, tmphlq=None, force_lock=False): # Removes escaping to execute this command dest = dest.replace('\\', '') src = src.replace('\\', '') - dest_dsp = "shr" if force_lock else "old" + dest_dsp = "shr" if force else "old" ocopy_cmd = "OCOPY INDD(DSSRC) OUTDD(DSTAR) TEXT" ocopy_dds = { diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 0a9c0d0902..c73aea2957 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -45,7 +45,7 @@ format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If C(is_binary) is C(true) - or C(executable) is C(true) as well, the module will fail. + or C(is_executable) is C(true) as well, the module will fail. type: bool default: false required: false @@ -116,7 +116,7 @@ C(src). If C(src) is a USS file, C(dest) will have a Fixed Block (FB) record format and the remaining attributes will be computed. If I(is_binary=true), C(dest) will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining - attributes will be computed. If I(executable=true),C(dest) will have an Undefined (U) record + attributes will be computed. If I(is_executable=true),C(dest) will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. - If C(src) is a file and C(dest) a partitioned data set, C(dest) does not need to include @@ -176,7 +176,7 @@ that is not available, then the value C(TMPHLQ) is used. required: false type: str - force: + replace: description: - If set to C(true) and the remote file or data set C(dest) is empty, the C(dest) will be reused. 
@@ -193,14 +193,14 @@ type: bool default: false required: false - force_lock: + force: description: - By default, when C(dest) is a MVS data set and is being used by another - process with DISP=SHR or DISP=OLD the module will fail. Use C(force_lock) + process with DISP=SHR or DISP=OLD the module will fail. Use C(force) to bypass DISP=SHR and continue with the copy operation. - If set to C(true) and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. - - Using C(force_lock) uses operations that are subject to race conditions + - Using C(force) uses operations that are subject to race conditions and can lead to data loss, use with caution. - If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in @@ -236,10 +236,10 @@ type: bool default: false required: false - executable: + is_executable: description: - If set to C(true), indicates that the file or library to be copied is an executable. - - If I(executable=true), and C(dest) is a data set, it must be a PDS or PDSE (library). + - If I(is_executable=true), and C(dest) is a data set, it must be a PDS or PDSE (library). - If C(dest) is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. @@ -253,7 +253,7 @@ - If set to C(true), indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. - Aliases are implicitly preserved when libraries are copied over to USS destinations. - That is, when C(executable=True) and C(dest) is a USS file or directory, this option will be ignored. + That is, when C(is_executable=True) and C(dest) is a USS file or directory, this option will be ignored. - Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. 
- If the C(dest) is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. @@ -566,7 +566,7 @@ transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this - behavior using module option C(executable) that will signify an executable is being + behavior using module option C(is_executable) that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. - It is the playbook author or user's responsibility to ensure they have @@ -682,14 +682,14 @@ src: HLQ.SAMPLE.PDSE dest: HLQ.EXISTING.PDSE remote_src: true - force: true + replace: true - name: Copy PDS member to a new PDS member. Replace if it already exists zos_copy: src: HLQ.SAMPLE.PDSE(SRCMEM) dest: HLQ.NEW.PDSE(DESTMEM) remote_src: true - force: true + replace: true - name: Copy a USS file to a PDSE member. If PDSE does not exist, allocate it zos_copy: @@ -753,7 +753,7 @@ src: HLQ.COBOLSRC.PDSE(TESTPGM) dest: HLQ.NEW.PDSE(MYCOBOL) remote_src: true - executable: true + is_executable: true aliases: true - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE @@ -761,7 +761,7 @@ src: '/home/loadlib/' dest: HLQ.LOADLIB.NEW remote_src: true - executable: true + is_executable: true aliases: true - name: Copy a file with ASA characters to a new sequential data set. @@ -897,7 +897,7 @@ sample: file note: description: A note to the user after module terminates. 
- returned: When ``force=true`` and ``dest`` exists + returned: When ``replace=true`` and ``dest`` exists type: str sample: No data was copied msg: @@ -987,11 +987,11 @@ def __init__( self, module, is_binary=False, - executable=False, + is_executable=False, aliases=False, asa_text=False, backup_name=None, - force_lock=False, + force=False, identical_gdg_copy=False, tmphlq=None ): @@ -1008,7 +1008,7 @@ def __init__( is_binary : bool Whether the file or data set to be copied contains binary data. - executable : bool + is_executable : bool Whether the file or data set to be copied is executable. asa_text : bool @@ -1018,7 +1018,7 @@ def __init__( backup_name : str The USS path or data set name of destination backup. - force_lock : str + force : str Whether the dest data set should be copied into using disp=shr when is opened by another process. @@ -1033,7 +1033,7 @@ def __init__( is_binary : bool Whether the file or data set to be copied contains binary data. - executable : bool + is_executable : bool Whether the file or data set to be copied is executable. asa_text : bool @@ -1043,7 +1043,7 @@ def __init__( backup_name : str The USS path or data set name of destination backup. - force_lock : str + force : str Whether the dest data set should be copied into using disp=shr when is opened by another process. 
@@ -1052,11 +1052,11 @@ def __init__( """ self.module = module self.is_binary = is_binary - self.executable = executable + self.is_executable = is_executable self.asa_text = asa_text self.aliases = aliases self.backup_name = backup_name - self.force_lock = force_lock + self.force = force self.identical_gdg_copy = identical_gdg_copy self.tmphlq = tmphlq @@ -1110,7 +1110,7 @@ def copy_to_seq( copy_args["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(new_src, dest, tmphlq=self.tmphlq, force_lock=self.force_lock) + response = copy.copy_asa_uss2mvs(new_src, dest, tmphlq=self.tmphlq, force=self.force) if response.rc != 0: raise CopyOperationError( @@ -1126,7 +1126,7 @@ def copy_to_seq( copy_args["options"] = "-B" try: - datasets.copy(new_src, dest, force=self.force_lock, **copy_args) + datasets.copy(new_src, dest, force=self.force, **copy_args) except zoau_exceptions.ZOAUException as copy_exception: raise CopyOperationError( msg="Unable to copy source {0} to {1}".format(new_src, dest), @@ -1150,7 +1150,7 @@ def copy_to_vsam(self, src, dest): CopyOperationError When REPRO fails to copy the data set. """ - out_dsp = "shr" if self.force_lock else "old" + out_dsp = "shr" if self.force else "old" dds = {"OUT": "{0},{1}".format(dest.upper(), out_dsp)} repro_cmd = """ REPRO - INDATASET('{0}') - @@ -1564,7 +1564,7 @@ def __init__( self, module, is_binary=False, - executable=False, + is_executable=False, asa_text=False, aliases=False, common_file_args=None, @@ -1600,7 +1600,7 @@ def __init__( super().__init__( module, is_binary=is_binary, - executable=executable, + is_executable=is_executable, asa_text=asa_text, aliases=aliases, backup_name=backup_name, @@ -1616,7 +1616,7 @@ def copy_to_uss( src_ds_type, src_member, member_name, - force, + replace, content_copy, ): """Copy a file or data set to a USS location. @@ -1638,7 +1638,7 @@ def copy_to_uss( Whether src is a data set member. 
member_name : str The name of the source data set member. - force : bool + replace : bool Whether to copy files to an already existing directory. content_copy : bool Whether copy is using content option or not. @@ -1655,7 +1655,7 @@ def copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) - if self.executable: + if self.is_executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) else: @@ -1682,7 +1682,7 @@ def copy_to_uss( dest = self._copy_to_file(src, dest, content_copy, conv_path) changed_files = None else: - dest, changed_files = self._copy_to_dir(src, dest, conv_path, force) + dest, changed_files = self._copy_to_dir(src, dest, conv_path, replace) if self.common_file_args is not None: mode = self.common_file_args.get("mode") @@ -1740,7 +1740,7 @@ def _copy_to_file(self, src, dest, content_copy, conv_path): datasets.copy(new_src, dest, **opts) shutil.copystat(new_src, dest, follow_symlinks=True) # shutil.copy(new_src, dest) - if self.executable: + if self.is_executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) except zoau_exceptions.ZOAUException as err: @@ -1765,7 +1765,7 @@ def _copy_to_dir( src_dir, dest_dir, conv_path, - force + replace ): """Helper function to copy a USS directory to another USS directory. If the path for dest_dir does not end with a trailing slash ("/"), @@ -1779,7 +1779,7 @@ def _copy_to_dir( USS dest directory. conv_path : str Path to the converted source directory. - force :bool + replace :bool Whether to copy files to an already existing directory. 
Returns @@ -1804,7 +1804,7 @@ def _copy_to_dir( if copy_directory: dest = os.path.join(validation.validate_safe_path(dest_dir), validation.validate_safe_path(os.path.basename(os.path.normpath(src_dir)))) # dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) - dest = self.copy_tree(new_src_dir, dest, dirs_exist_ok=force) + dest = self.copy_tree(new_src_dir, dest, dirs_exist_ok=replace) # Restoring permissions for preexisting files and subdirectories. for filepath, permissions in original_permissions: @@ -1953,7 +1953,7 @@ def _mvs_copy_to_uss( if self.asa_text: response = copy.copy_asa_mvs2uss(src, dest, tmphlq=self.tmphlq) rc = response.rc - elif self.executable: + elif self.is_executable: try: rc = datasets.copy(src, dest, alias=True, executable=True) except zoau_exceptions.ZOAUException as copy_exception: @@ -1986,7 +1986,7 @@ def _mvs_copy_to_uss( raise CopyOperationError( msg=f"Error while copying GDG {src} to {dest}" ) - elif self.executable: + elif self.is_executable: try: datasets.copy(src, dest, alias=True, executable=True) except zoau_exceptions.ZOAUException as copy_exception: @@ -2023,11 +2023,11 @@ def __init__( self, module, is_binary=False, - executable=False, + is_executable=False, aliases=False, asa_text=False, backup_name=None, - force_lock=False, + force=False, tmphlq=None ): """ Utility class to handle copying to partitioned data sets or @@ -2052,11 +2052,11 @@ def __init__( super().__init__( module, is_binary=is_binary, - executable=executable, + is_executable=is_executable, aliases=aliases, asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock, + force=force, tmphlq=tmphlq ) @@ -2107,7 +2107,7 @@ def copy_to_pdse( path, dirs, files = next(os.walk(new_src)) src_members = [ - os.path.normpath("{0}/{1}".format(path, file)) if (self.is_binary or self.executable) + os.path.normpath("{0}/{1}".format(path, file)) if (self.is_binary or self.is_executable) else normalize_line_endings("{0}/{1}".format(path, file), encoding) for 
file in files ] @@ -2229,7 +2229,7 @@ def copy_to_member( opts["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(src, dest, tmphlq=self.tmphlq, force_lock=self.force_lock) + response = copy.copy_asa_uss2mvs(src, dest, tmphlq=self.tmphlq, force=self.force) rc, out, err = response.rc, response.stdout_response, response.stderr_response else: # While ASA files are just text files, we do a binary copy @@ -2238,7 +2238,7 @@ def copy_to_member( opts["options"] = "-B" try: - rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, force=self.force_lock, **opts) + rc = datasets.copy(src, dest, alias=self.aliases, executable=self.is_executable, force=self.force, **opts) out = "" err = "" except zoau_exceptions.ZOAUException as copy_exception: @@ -2419,7 +2419,7 @@ def get_data_set_attributes( def create_seq_dataset_from_file( file, dest, - force, + replace, is_binary, asa_text, record_length=None, @@ -2435,7 +2435,7 @@ def create_seq_dataset_from_file( Path of the source file. dest : str Name of the data set. - force : bool + replace : bool Whether to replace an existing data set. is_binary : bool Whether the file has binary data. @@ -2479,7 +2479,7 @@ def create_seq_dataset_from_file( volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) def backup_data(ds_name, ds_type, backup_name, tmphlq=None): @@ -2523,7 +2523,7 @@ def is_compatible( src_member, is_src_dir, is_src_inline, - executable, + is_executable, asa_text, src_has_asa_chars, dest_has_asa_chars, @@ -2547,7 +2547,7 @@ def is_compatible( Whether the src is a USS directory. is_src_inline : bool Whether the src comes from inline content. - executable : bool + is_executable : bool Whether the src is a executable to be copied. asa_text : bool Whether the copy operation will handle ASA control characters. 
@@ -2577,7 +2577,7 @@ def is_compatible( # If source or destination is a sequential data set and executable as true # is incompatible to execute the copy. # ******************************************************************** - if executable: + if is_executable: if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: return False @@ -2692,7 +2692,7 @@ def does_destination_allow_copy( member_exists, dest_type, is_uss, - force, + replace, volume=None, tmphlq=None ): @@ -2715,7 +2715,7 @@ def does_destination_allow_copy( Type of the destination (SEQ/PARTITIONED/VSAM/USS). is_uss : bool Whether or not the destination is inside USS. - force : bool + replace : bool Whether or not the module can replace existing destinations. volume : str, optional Volume where the destination should be. @@ -2731,21 +2731,21 @@ def does_destination_allow_copy( # If the destination is inside USS and the module doesn't have permission to replace it, # it fails. if is_uss and dest_exists: - if src_type == "USS" and os.path.isdir(dest) and os.path.isdir(src) and not force: + if src_type == "USS" and os.path.isdir(dest) and os.path.isdir(src) and not replace: return False - elif os.path.isfile(dest) and not force: + elif os.path.isfile(dest) and not replace: return False # If the destination is a sequential or VSAM data set and is empty, the module will try to use it, # otherwise, force needs to be True to continue and replace it. if (dest_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_VSAM) and dest_exists: is_dest_empty = data_set.DataSet.is_empty(dest, volume, tmphlq=tmphlq) - if not (is_dest_empty or force): + if not (is_dest_empty or replace): return False # When the destination is a partitioned data set, the module will have to be able to replace # existing members inside of it, if needed. 
- if dest_type in data_set.DataSet.MVS_PARTITIONED and dest_exists and member_exists and not force: + if dest_type in data_set.DataSet.MVS_PARTITIONED and dest_exists and member_exists and not replace: return False # When the destination is an existing GDG, we'll check that we have enough free generations @@ -2920,9 +2920,9 @@ def allocate_destination_data_set( src_ds_type, dest_ds_type, dest_exists, - force, + replace, is_binary, - executable, + is_executable, asa_text, is_gds, is_active_gds, @@ -2946,11 +2946,11 @@ def allocate_destination_data_set( Type of the destination data set. dest_exists : bool Whether the destination data set already exists. - force : bool + replace : bool Whether to replace an existent data set. is_binary : bool Whether the data set will contain binary data. - executable : bool + is_executable : bool Whether the data to copy is an executable dataset or file. asa_text : bool Whether the data to copy has ASA control characters. @@ -2977,7 +2977,8 @@ def allocate_destination_data_set( src_name = data_set.extract_dsname(src) is_dest_empty = data_set.DataSet.is_empty(dest) if dest_exists else True - # Replacing an existing dataset only when it's not empty. We don't know whether that + # Replace in datasets. + # Reuse empty datasets when replace is not true. We don't know whether that # empty dataset was created for the user by an admin/operator, and they don't have permissions # to create new datasets. # These rules assume that source and destination types are compatible. @@ -2987,6 +2988,8 @@ def allocate_destination_data_set( if dest_exists and (is_dest_empty or dest_ds_type == "GDG"): return False, dest_params, dest + if dest_exists and is_dest_empty and not replace: + return False, dest_params, dest # Giving more priority to the parameters given by the user. # Cover case the user set executable to true to create dataset valid. 
if dest_data_set: @@ -3018,14 +3021,14 @@ def allocate_destination_data_set( del dest_params["purge"] del dest_params["extended"] del dest_params["fifo"] - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_SEQ: volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume, tmphlq=tmphlq) + create_seq_dataset_from_file(src, dest, replace, is_binary, asa_text, volume=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: @@ -3043,7 +3046,7 @@ def allocate_destination_data_set( create_seq_dataset_from_file( temp_dump, dest, - force, + replace, is_binary, asa_text, record_length=record_length, @@ -3060,7 +3063,7 @@ def allocate_destination_data_set( if is_gds and not is_active_gds: data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=is_executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. 
@@ -3077,7 +3080,7 @@ def allocate_destination_data_set( type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. @@ -3097,7 +3100,7 @@ def allocate_destination_data_set( if asa_text: record_length += 1 - if executable: + if is_executable: record_format = "U" record_length = 0 type_ds = "LIBRARY" @@ -3117,7 +3120,7 @@ def allocate_destination_data_set( size = sum(os.stat("{0}/{1}".format(src, member)).st_size for member in os.listdir(src)) # This PDSE will be created with record format VB and a record length of 1028. - if executable: + if is_executable: dest_params = get_data_set_attributes( dest, size, is_binary, record_format='U', @@ -3135,7 +3138,7 @@ def allocate_destination_data_set( volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: # If dest_data_set is not available, always create the destination using the src VSAM # as a model. 
@@ -3271,15 +3274,18 @@ def update_result(res_args, original_args): src = res_args.get("src") note = res_args.get("note") backup_name = res_args.get("backup_name") + dest_created = res_args.get("dest_created") dest_data_set_attrs = res_args.get("dest_data_set_attrs") + updated_result = dict( dest=res_args.get("dest"), - is_binary=original_args.get("is_binary"), changed=res_args.get("changed"), invocation=dict(module_args=original_args), + dest_created=dest_created, ) + if src: - updated_result["src"] = original_args.get("src") + updated_result["src"] = src if note: updated_result["note"] = note if backup_name: @@ -3353,7 +3359,7 @@ def run_module(module, arg_def): fail_json Cannot write a partitioned data set (PDS) to a USS file. fail_json - Destination already exists on the system, unable to overwrite unless force=True is specified. + Destination already exists on the system, unable to overwrite unless replace=True is specified. fail_json Unable to allocate destination data set. """ @@ -3387,7 +3393,7 @@ def run_module(module, arg_def): dest = module.params.get('dest') remote_src = module.params.get('remote_src') is_binary = module.params.get('is_binary') - executable = module.params.get('executable') + is_executable = module.params.get('is_executable') asa_text = module.params.get('asa_text') aliases = module.params.get('aliases') backup = module.params.get('backup') @@ -3399,8 +3405,8 @@ def run_module(module, arg_def): encoding = module.params.get('encoding') volume = module.params.get('volume') tmphlq = module.params.get('tmp_hlq') + replace = module.params.get('replace') force = module.params.get('force') - force_lock = module.params.get('force_lock') content = module.params.get('content') identical_gdg_copy = module.params.get('identical_gdg_copy', False) @@ -3512,7 +3518,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as 
the default encoding. - if not is_binary and not is_uss and not executable: + if not is_binary and not is_uss and not is_executable: new_src = src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). @@ -3597,7 +3603,7 @@ def run_module(module, arg_def): # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type").upper() - elif executable: + elif is_executable: # When executable is selected and dest_exists is false means an executable PDSE was copied to remote, # so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. # Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED @@ -3664,7 +3670,7 @@ def run_module(module, arg_def): src_member, is_src_dir, (src_ds_type == "USS" and src is None), - executable, + is_executable, asa_text, src_has_asa_chars, dest_has_asa_chars, @@ -3688,7 +3694,7 @@ def run_module(module, arg_def): # for try to write in dest and if both src and dest are in lock. # ******************************************************************** if dest_exists and dest_ds_type != "USS": - if not force_lock: + if not force: is_dest_lock = data_set.DataSetUtils.verify_dataset_disposition(data_set=data_set.extract_dsname(dest_name), disposition="old") if is_dest_lock: module.fail_json( @@ -3701,11 +3707,11 @@ def run_module(module, arg_def): # Alias support is not avaiable to and from USS for text-based data sets. # ******************************************************************** if aliases: - if (src_ds_type == 'USS' or dest_ds_type == 'USS') and not executable: + if (src_ds_type == 'USS' or dest_ds_type == 'USS') and not is_executable: module.fail_json( msg="Alias support for text-based data sets is not available " + "for USS sources (src) or targets (dest). " - + "Try setting executable=True or aliases=False." 
+ + "Try setting is_executable=True or aliases=False." ) # ******************************************************************** @@ -3786,12 +3792,12 @@ def run_module(module, arg_def): dest_member_exists, dest_ds_type, is_uss, - force, + replace, volume, tmphlq ): module.fail_json( - msg="{0} already exists on the system, unable to overwrite unless force=True is specified.".format(raw_dest), + msg="{0} already exists on the system, unable to overwrite unless replace=True is specified.".format(raw_dest), changed=False, dest=dest ) @@ -3811,9 +3817,9 @@ def run_module(module, arg_def): src_ds_type, dest_ds_type, dest_exists, - force, + replace, is_binary, - executable, + is_executable, asa_text, is_dest_gds, is_dest_gds_active, @@ -3821,6 +3827,10 @@ def run_module(module, arg_def): volume=volume, tmphlq=tmphlq ) + if res_args["changed"]: + res_args["dest_created"] = True + else: + res_args["dest_created"] = False except Exception as err: if converted_src: src = original_src @@ -3843,10 +3853,10 @@ def run_module(module, arg_def): copy_handler = CopyHandler( module, is_binary=is_binary, - executable=executable, + is_executable=is_executable, asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock, + force=force, identical_gdg_copy=module.params.get('identical_gdg_copy', False), tmphlq=tmphlq ) @@ -3864,14 +3874,14 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- if is_uss: # Removing the carriage return characters - if src_ds_type == "USS" and not is_binary and not executable: + if src_ds_type == "USS" and not is_binary and not is_executable: new_src = conv_path or src if os.path.isfile(new_src): conv_path = copy_handler.remove_cr_endings(new_src) uss_copy_handler = USSCopyHandler( module, is_binary=is_binary, - executable=executable, + is_executable=is_executable, asa_text=asa_text, aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), @@ -3881,7 +3891,10 @@ def 
run_module(module, arg_def): original_checksum = None if dest_exists: + res_args["dest_created"] = False original_checksum = get_file_checksum(dest) + else: + res_args["dest_created"] = True dest = uss_copy_handler.copy_to_uss( src, @@ -3890,7 +3903,7 @@ def run_module(module, arg_def): src_ds_type, src_member, member_name, - force, + replace, bool(content) ) res_args['size'] = os.stat(dest).st_size @@ -3939,11 +3952,11 @@ def run_module(module, arg_def): pdse_copy_handler = PDSECopyHandler( module, is_binary=is_binary, - executable=executable, + is_executable=is_executable, asa_text=asa_text, aliases=aliases, backup_name=backup_name, - force_lock=force_lock, + force=force, tmphlq=tmphlq ) @@ -4002,7 +4015,7 @@ def main(): src=dict(type='str'), dest=dict(required=True, type='str'), is_binary=dict(type='bool', default=False), - executable=dict(type='bool', default=False), + is_executable=dict(type='bool', default=False), asa_text=dict(type='bool', default=False), aliases=dict(type='bool', default=False, required=False), identical_gdg_copy=dict(type='bool', default=False), @@ -4094,8 +4107,8 @@ def main(): autoescape=dict(type='bool', default=True), ) ), + replace=dict(type='bool', default=False), force=dict(type='bool', default=False), - force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), owner=dict(type='str', required=False), group=dict(type='str', required=False), @@ -4107,18 +4120,20 @@ def main(): src=dict(arg_type='data_set_or_path', required=False), dest=dict(arg_type='data_set_or_path', required=True), is_binary=dict(arg_type='bool', required=False, default=False), - executable=dict(arg_type='bool', required=False, default=False), + is_executable=dict(arg_type='bool', required=False, default=False), asa_text=dict(arg_type='bool', required=False, default=False), aliases=dict(arg_type='bool', required=False, default=False), + identical_gdg_copy=dict(type='bool', default=False), content=dict(arg_type='str', required=False), 
backup=dict(arg_type='bool', default=False, required=False), backup_name=dict(arg_type='data_set_or_path', required=False), local_follow=dict(arg_type='bool', default=True, required=False), remote_src=dict(arg_type='bool', default=False, required=False), - checksum=dict(arg_type='str', required=False), + ignore_sftp_stderr=dict(type='bool', default=True), validate=dict(arg_type='bool', required=False), volume=dict(arg_type='str', required=False), - force_lock=dict(type='bool', default=False), + replace=dict(type='bool', default=False), + force=dict(type='bool', default=False), dest_data_set=dict( arg_type='dict', @@ -4174,7 +4189,7 @@ def main(): not module.params.get("encoding").get("to") and not module.params.get("remote_src") and not module.params.get("is_binary") - and not module.params.get("executable") + and not module.params.get("is_executable") ): module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset() elif ( @@ -4211,7 +4226,6 @@ def main(): shutil.rmtree(path) elif os.path.exists(default_path): shutil.rmtree(default_path) - res_args = update_result(res_args=res_args, original_args=module.params) module.exit_json(**res_args) except CopyOperationError as err: diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index a660d75765..3bfb1b14d6 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -247,7 +247,7 @@ zos_copy: src: /etc/profile remote_src: True - force: True + replace: True dest: {3} async: 50 poll: 0 @@ -408,13 +408,13 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo cp_res = hosts.all.zos_copy( content=LINK_JCL.format(cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem), dest=temp_jcl_uss_path, - force=True, + replace=True, ) # Submit link JCL. 
job_result = hosts.all.zos_job_submit( src=temp_jcl_uss_path, - location="uss", - wait_time_s=60 + remote_src=True, + wait_time=60 ) for result in job_result.contacted.values(): print(result) @@ -462,8 +462,8 @@ def generate_loadlib(hosts, cobol_src_pds, cobol_src_mems, loadlib_pds, loadlib_ def generate_executable_uss(hosts, dir, src, src_jcl_call): - hosts.all.zos_copy(content=hello_world, dest=src, force=True) - hosts.all.zos_copy(content=call_c_hello_jcl.format(dir), dest=src_jcl_call, force=True) + hosts.all.zos_copy(content=hello_world, dest=src, replace=True) + hosts.all.zos_copy(content=call_c_hello_jcl.format(dir), dest=src_jcl_call, replace=True) hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir=dir) hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) verify_exe_src = hosts.all.shell(cmd="{0}/hello_world".format(dir)) @@ -496,10 +496,13 @@ def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): + print(result) assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: @@ -508,12 +511,12 @@ def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, force=False, is_remote=False), - dict(src="/etc/profile", is_file=True, force=True, is_remote=False), - dict(src="Example inline content", is_file=False, force=False, is_remote=False), - dict(src="Example inline content", is_file=False, force=True, is_remote=False), - dict(src="/etc/profile", is_file=True, force=False, is_remote=True), - dict(src="/etc/profile", is_file=True, force=True, is_remote=True), + dict(src="/etc/profile", 
is_file=True, replace=False, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=False), + dict(src="Example inline content", is_file=False, replace=False, is_remote=False), + dict(src="Example inline content", is_file=False, replace=True, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=True), ]) def test_copy_file_to_existing_uss_file(ansible_zos_module, src): hosts = ansible_zos_module @@ -526,18 +529,20 @@ def test_copy_file_to_existing_uss_file(ansible_zos_module, src): assert timestamp is not None if src["is_file"]: - copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, force=src["force"], remote_src=src["is_remote"]) + copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, replace=src["replace"], remote_src=src["is_remote"]) else: - copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, force=src["force"]) + copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, replace=src["replace"]) stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): - if src["force"]: + if src["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -570,6 +575,8 @@ def test_copy_file_to_uss_dir(ansible_zos_module, src): assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for st in stat_res.contacted.values(): assert st.get("stat").get("exists") is True finally: @@ -592,7 +599,8 @@ def test_copy_file_to_uss_dir_missing_parents(ansible_zos_module): assert 
result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest - assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for st in stat_res.contacted.values(): assert st.get("stat").get("exists") is True finally: @@ -616,6 +624,11 @@ def test_copy_local_symlink_to_uss_file(ansible_zos_module): stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -642,6 +655,8 @@ def test_copy_local_file_to_uss_file_convert_encoding(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: @@ -665,6 +680,8 @@ def test_copy_local_file_to_uss_file_with_absent_remote_tmp_dir(ansible_zos_modu assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: @@ -685,6 +702,8 @@ def test_copy_inline_content_to_uss_dir(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert 
result.get("stat").get("exists") is True finally: @@ -708,11 +727,10 @@ def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): src=src_dir, dest=dest_dir, remote_src=True, - force=False + replace=False ) for result in copy_result.contacted.values(): - print(result) assert result.get("msg") is not None assert result.get("changed") is False assert "Error" in result.get("msg") @@ -799,6 +817,9 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None # File z/OS dest is now UTF-8, dump the hex value and compare it to an # expected big-endian version, can't run delegate_to local host so expected @@ -840,6 +861,9 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None full_outer_file= "{0}/{1}/file3".format(dest_path, level_1) full_iner_file= "{0}/{1}/{2}/file3".format(dest_path, level_1, level_2) @@ -847,10 +871,8 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ verify_copy_2 = hosts.all.shell(cmd="cat {0}".format(full_iner_file)) for result in verify_copy_1.contacted.values(): - print(result) assert result.get("stdout") == DUMMY_DATA for result in verify_copy_2.contacted.values(): - print(result) assert result.get("stdout") == DUMMY_DATA finally: hosts.all.file(name=dest_path, state="absent") @@ -888,6 +910,9 @@ def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert 
result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_path, src_basename) @@ -938,6 +963,9 @@ def test_copy_uss_dir_to_non_existing_dir(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_dir, src_basename) @@ -981,7 +1009,7 @@ def test_copy_local_dir_to_existing_dir_forced(ansible_zos_module, copy_director copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=True + replace=True ) stat_source_res = hosts.all.stat(path="{0}/{1}".format(dest_path, source_basename)) @@ -994,6 +1022,9 @@ def test_copy_local_dir_to_existing_dir_forced(ansible_zos_module, copy_director for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_path, source_basename) @@ -1042,7 +1073,7 @@ def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory) src=src_dir, dest=dest_dir, remote_src=True, - force=True + replace=True ) stat_dir_res = hosts.all.stat(path="{0}/{1}".format(dest_dir, src_basename)) @@ -1055,6 +1086,9 @@ def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory) for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None 
if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_dir, src_basename) @@ -1105,7 +1139,7 @@ def test_copy_local_nested_dir_to_uss(ansible_zos_module, create_dest): copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=create_dest + replace=create_dest ) stat_subdir_a_res = hosts.all.stat(path="{0}/subdir_a".format(dest_path)) @@ -1115,6 +1149,8 @@ def test_copy_local_nested_dir_to_uss(ansible_zos_module, create_dest): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_subdir_a_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is True @@ -1147,7 +1183,7 @@ def test_copy_uss_nested_dir_to_uss(ansible_zos_module, create_dest): src=source_path, dest=dest_path, remote_src=True, - force=create_dest + replace=create_dest ) stat_subdir_a_res = hosts.all.stat(path="{0}/subdir_a".format(dest_path)) @@ -1157,6 +1193,8 @@ def test_copy_uss_nested_dir_to_uss(ansible_zos_module, create_dest): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_subdir_a_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is True @@ -1202,7 +1240,7 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=True, + replace=True, mode=mode ) @@ -1218,6 +1256,9 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not 
None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == dest_subdir @@ -1295,7 +1336,7 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=True, + replace=True, remote_src=True, mode=mode ) @@ -1312,6 +1353,9 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == dest_subdir @@ -1370,13 +1414,17 @@ def test_backup_uss_file(ansible_zos_module, backup): if backup: backup_name = get_random_file_name(dir=TMP_DIRECTORY) - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True, backup_name=backup_name) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None backup_name_result = result.get("backup_name") + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if backup: assert backup_name_result == backup_name @@ -1474,6 +1522,8 @@ def test_copy_template_file(ansible_zos_module, encoding): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_template + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. 
@@ -1534,7 +1584,7 @@ def test_copy_template_dir(ansible_zos_module): src=temp_dir, dest=dest_path, use_template=True, - force=True + replace=True ) verify_copy_a = hosts.all.shell( @@ -1550,6 +1600,8 @@ def test_copy_template_dir(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_path + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy_a.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. @@ -1620,6 +1672,8 @@ def test_copy_template_file_with_non_default_markers(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_template + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. @@ -1671,6 +1725,8 @@ def test_copy_template_file_to_dataset(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_dataset + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. 
@@ -1719,6 +1775,7 @@ def test_copy_asa_file_to_asa_sequential(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1754,11 +1811,11 @@ def test_copy_asa_file_to_asa_partitioned(ansible_zos_module): ) for cp_res in copy_result.contacted.values(): - print(cp_res) assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == full_dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): print(v_cp) assert v_cp.get("rc") == 0 @@ -1813,6 +1870,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1869,6 +1927,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == full_dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1925,6 +1984,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1982,6 +2042,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): assert cp_res.get("changed") is True assert 
cp_res.get("dest") == full_dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -2033,6 +2094,8 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Since OPUT preserves all blank spaces associated @@ -2058,9 +2121,16 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, try: hosts.all.file(path=dest_path, state="directory", mode=mode) permissions_before = hosts.all.stat(path=dest_path) - hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=other_mode) + cp_bef_result = hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=other_mode) permissions = hosts.all.stat(path=dest_path) + for cp_res in cp_bef_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None + for before in permissions_before.contacted.values(): permissions_be_copy = before.get("stat").get("mode") @@ -2070,7 +2140,15 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, assert permissions_be_copy == permissions_af_copy # Extra asserts to ensure change mode rewrite a copy - hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=mode_overwrite) + af_bef_result = hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=mode_overwrite) + + for cp_res in af_bef_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is False + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + 
assert cp_res.get("src") is not None + permissions_overwriten = hosts.all.stat(path = full_path) for over in permissions_overwriten.contacted.values(): assert over.get("stat").get("mode") == mode_overwrite @@ -2079,7 +2157,7 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type, f_lock",[ +@pytest.mark.parametrize("ds_type, force",[ ("pds", True), # Success path, pds locked, force_lock enabled and user authorized ("pdse", True), # Success path, pdse locked, force_lock enabled and user authorized ("seq", True), # Success path, seq locked, force_lock enabled and user authorized @@ -2087,7 +2165,7 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, ("pdse", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ("seq", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ]) -def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, f_lock): +def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, force): retries = 0 max_retries = 5 success = False @@ -2095,8 +2173,8 @@ def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, f_lock): # Not adding a try/except block here so a real exception can bubble up # and stop pytest immediately (if using -x or --stop). while retries < max_retries: - print(f'Trying dest lock for {ds_type}. Expecting success? {f_lock}. Retry: {retries}.') - result = copy_dest_lock(ansible_zos_module, ds_type, f_lock) + print(f'Trying dest lock for {ds_type}. Expecting success? {force}. 
Retry: {retries}.') + result = copy_dest_lock(ansible_zos_module, ds_type, force) if result: success = True @@ -2107,7 +2185,7 @@ def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, f_lock): assert success is True -def copy_dest_lock(ansible_zos_module, ds_type, f_lock): +def copy_dest_lock(ansible_zos_module, ds_type, force): hosts = ansible_zos_module assert_msg = "" @@ -2177,34 +2255,38 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): decho_result = hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) for result in decho_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) temp_dir = get_random_file_name(dir=TMP_DIRECTORY) - c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', force=True) + c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', replace=True) for result in c_src_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None jcl_result = hosts.all.zos_copy( content=call_c_jcl.format(temp_dir, dest_data_set), dest=f'{temp_dir}/call_c_pgm.jcl', - force=True + replace=True ) for result in jcl_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) - assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None subproc_result = hosts.all.shell(cmd="xlc -o 
pdse-lock pdse-lock.c", chdir=f"{temp_dir}/") for result in subproc_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False @@ -2212,7 +2294,6 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): job_result = hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=f"{temp_dir}/") for result in job_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False @@ -2222,23 +2303,25 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): src = src_data_set, dest = dest_data_set, remote_src = True, - force=True, - force_lock=f_lock, + replace=True, + force=force, ) for result in results.contacted.values(): assert_msg = result.get("stdout", "") - print(result) - if f_lock: #and apf_auth_user: + if force: #and apf_auth_user: + print(result) assert result.get("changed") == True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None # verify that the content is the same verify_copy = hosts.all.shell( cmd="dcat \'{0}\'".format(dest_data_set), executable=SHELL_EXECUTABLE, ) for vp_result in verify_copy.contacted.values(): - print(vp_result) verify_copy_2 = hosts.all.shell( cmd="dcat \'{0}\'".format(src_data_set), executable=SHELL_EXECUTABLE, @@ -2246,7 +2329,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): for vp_result_2 in verify_copy_2.contacted.values(): print(vp_result_2) assert vp_result_2.get("stdout") == vp_result.get("stdout") - elif not f_lock: + elif not force: assert result.get("failed") is True assert result.get("changed") == False assert "because a task is accessing the data set" in result.get("msg") @@ -2256,7 +2339,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): except AssertionError: # Checking for the error code from 
when the system thinks both data sets # are identical. - if "FSUM8977" in assert_msg: + if "FSUM8977" in str(assert_msg): return False else: raise @@ -2277,7 +2360,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.asa -@pytest.mark.parametrize("ds_type, f_lock",[ +@pytest.mark.parametrize("ds_type, force",[ ("pds", True), # Success path, pds locked, force_lock enabled and user authorized ("pdse", True), # Success path, pdse locked, force_lock enabled and user authorized ("seq", True), # Success path, seq locked, force_lock enabled and user authorized @@ -2285,7 +2368,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): ("pdse", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ("seq", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ]) -def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, f_lock): +def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, force): retries = 0 max_retries = 5 success = False @@ -2293,8 +2376,8 @@ def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, f_lock): # Not adding a try/except block here so a real exception can bubble up # and stop pytest immediately (if using -x or --stop). while retries < max_retries: - print(f'Trying dest lock for {ds_type}. Expecting success? {f_lock}. Retry: {retries}.') - result = copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock) + print(f'Trying dest lock for {ds_type}. Expecting success? {force}. 
Retry: {retries}.') + result = copy_asa_dest_lock(ansible_zos_module, ds_type, force) if result: success = True @@ -2305,7 +2388,7 @@ def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, f_lock): assert success is True -def copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock): +def copy_asa_dest_lock(ansible_zos_module, ds_type, force): hosts = ansible_zos_module assert_msg = "" @@ -2348,23 +2431,30 @@ def copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock): # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) temp_dir = get_random_file_name(dir=TMP_DIRECTORY) - c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', force=True) + c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', replace=True) for result in c_src_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None jcl_result = hosts.all.zos_copy( content=call_c_jcl.format(temp_dir, dest_data_set), dest=f'{temp_dir}/call_c_pgm.jcl', - force=True + replace=True ) for result in jcl_result.contacted.values(): assert_msg = result.get("stdout", "") print(result) assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None subproc_result = hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=f"{temp_dir}/") for result in subproc_result.contacted.values(): @@ -2389,16 +2479,19 @@ def copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock): dest=dest_data_set, remote_src=False, asa_text=True, - force=True, - force_lock=f_lock + replace=True, + force=force ) for 
result in results.contacted.values(): assert_msg = result.get("stdout", "") print(result) - if f_lock: #and apf_auth_user: + if force: #and apf_auth_user: assert result.get("changed") is True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None # We need to escape the data set name because we are using cat, using dcat will # bring the trailing empty spaces according to the data set record length. @@ -2474,6 +2567,8 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 for v_recl in verify_recl.contacted.values(): @@ -2527,6 +2622,8 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert len(v_cp.get("stdout_lines")) == 2 @@ -2585,6 +2682,8 @@ def test_copy_file_crlf_endings_and_pound_to_seq_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): print(v_cp) assert v_cp.get("rc") == 0 @@ -2630,6 +2729,8 @@ def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: 
hosts.all.zos_data_set(name=dest, state="absent") os.remove(src) @@ -2672,6 +2773,8 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.zos_data_set(name=dest, state="absent") hosts.all.file(path=src, state="absent") @@ -2709,7 +2812,7 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True - assert cp_res.get("is_binary") == src["is_binary"] + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -2719,12 +2822,12 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, force=True, is_remote=False), - dict(src="Example inline content", is_file=False, force=True, is_remote=False), - dict(src="/etc/profile", is_file=True, force=True, is_remote=True), - dict(src="/etc/profile", is_file=True, force=False, is_remote=False), - dict(src="Example inline content", is_file=False, force=False, is_remote=False), - dict(src="/etc/profile", is_file=True, force=False, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=False), + dict(src="Example inline content", is_file=False, replace=True, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=False), + dict(src="Example inline content", is_file=False, replace=False, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=True), ]) def 
test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module @@ -2734,14 +2837,16 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, type="seq", state="present") if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], replace=src["replace"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], replace=src["replace"]) for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=dest, state="absent") @@ -2749,10 +2854,10 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", force=False, is_remote=False), - dict(src="/etc/profile", force=True, is_remote=False), - dict(src="/etc/profile", force=False, is_remote=True), - dict(src="/etc/profile", force=True, is_remote=True), + dict(src="/etc/profile", replace=False, is_remote=False), + dict(src="/etc/profile", replace=True, is_remote=False), + dict(src="/etc/profile", replace=False, is_remote=True), + dict(src="/etc/profile", replace=True, is_remote=True), ]) def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module @@ -2762,13 +2867,15 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) - 
copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], replace=src["replace"]) for result in copy_result.contacted.values(): - if src["force"]: + if src["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2795,6 +2902,8 @@ def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -2807,8 +2916,8 @@ def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): @pytest.mark.uss @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_existing_uss_file(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_random_file_name(dir=TMP_DIRECTORY) @@ -2818,17 +2927,19 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): hosts.all.file(path=dest, state="touch") hosts.all.shell(cmd=f"decho 'test line' '{src_ds}' ") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) stat_res = hosts.all.stat(path=dest) verify_copy = hosts.all.shell( cmd="cat {0}".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if force: + if 
replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2861,6 +2972,9 @@ def test_copy_ps_to_existing_uss_dir(ansible_zos_module): for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -2888,6 +3002,7 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -2896,8 +3011,8 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_empty_ps(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_empty_ps(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() @@ -2906,7 +3021,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): hosts.all.shell(cmd=f"decho 'test line ' '{src_ds}'") hosts.all.shell(cmd=f"dtouch -tseq '{src_ds}'") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) @@ -2915,6 +3030,8 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, 
force): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -2923,8 +3040,8 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_non_empty_ps(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() @@ -2933,16 +3050,18 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): hosts.all.shell(cmd=f"decho 'This is a test ' '{src_ds}' ") hosts.all.shell(cmd=f"decho 'This is a test ' '{dest}' ") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2955,8 +3074,8 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() @@ -2965,16 +3084,18 @@ def 
test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{src_ds}' ") hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{dest}' ") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2997,13 +3118,17 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): if backup: backup_name = get_tmp_ds_name() - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True, backup_name=backup_name) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("backup_name") is not None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None result_backup_name = result.get("backup_name") if backup: assert backup_name == result.get("backup_name") @@ -3063,6 +3188,8 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not 
None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3103,6 +3230,8 @@ def test_copy_file_to_non_existing_member_implicit(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_member + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3112,12 +3241,12 @@ def test_copy_file_to_non_existing_member_implicit(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, force=False, is_remote=False), - dict(src="/etc/profile", is_file=True, force=True, is_remote=False), - dict(src="Example inline content", is_file=False, force=False, is_remote=False), - dict(src="Example inline content", is_file=False, force=True, is_remote=False), - dict(src="/etc/profile", is_file=True, force=False, is_remote=True), - dict(src="/etc/profile", is_file=True, force=True, is_remote=True) + dict(src="/etc/profile", is_file=True, replace=False, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=False), + dict(src="Example inline content", is_file=False, replace=False, is_remote=False), + dict(src="Example inline content", is_file=False, replace=True, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=True) ]) def test_copy_file_to_existing_member(ansible_zos_module, src): hosts = ansible_zos_module @@ -3137,9 +3266,9 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, type="member", state="present") if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, replace=src["replace"], 
remote_src=src["is_remote"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, force=src["force"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, replace=src["replace"]) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), @@ -3147,10 +3276,12 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): ) for cp_res in copy_result.contacted.values(): - if src["force"]: + if src["replace"]: assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None else: assert cp_res.get("msg") is not None assert cp_res.get("changed") is False @@ -3199,6 +3330,8 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -3210,12 +3343,12 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="seq", replace=False), + dict(type="seq", replace=True), + dict(type="pds", replace=False), + dict(type="pds", replace=True), + dict(type="pdse", replace=False), + dict(type="pdse", replace=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module @@ -3236,7 +3369,7 @@ def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) 
hosts.all.zos_data_set(name=dest, type="member") - copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) + copy_result = hosts.all.zos_copy(src=src, dest=dest, replace=args["replace"], remote_src=True) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), @@ -3244,16 +3377,18 @@ def test_copy_data_set_to_existing_member(ansible_zos_module, args): ) for cp_res in copy_result.contacted.values(): - if args["force"]: + if args["replace"]: assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None else: assert cp_res.get("msg") is not None assert cp_res.get("changed") is False for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 - if args["force"]: + if args["replace"]: assert v_cp.get("stdout") != "" finally: hosts.all.zos_data_set(name=src_data_set, state="absent") @@ -3283,6 +3418,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_path assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3312,6 +3448,7 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 finally: @@ -3344,6 +3481,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert len(result.get("stdout_lines")) == 2 @@ -3384,6 
+3522,8 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3424,6 +3564,7 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -3458,6 +3599,8 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 @@ -3548,7 +3691,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib, pgm_mem), remote_src=True, - executable=True, + is_executable=True, aliases=False ) # zos_copy w an executables and its alias: @@ -3556,7 +3699,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib_aliases, pgm_mem), remote_src=True, - executable=True, + is_executable=True, aliases=True ) @@ -3564,11 +3707,15 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): 
assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -3676,11 +3823,14 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): src="{0}({1})".format(src_lib, pgm_mem), dest=uss_dest, remote_src=True, - executable=True, - force=True) + is_executable=True, + replace=True) for result in copy_uss_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None # run executable on USS verify_exe_uss = hosts.all.shell( @@ -3696,7 +3846,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): src="{0}".format(uss_dest), dest="{0}({1})".format(dest_lib, pgm_mem), remote_src=True, - executable=True, + is_executable=True, aliases=False ) # zos_copy from USS file w an executables and its alias: @@ -3704,7 +3854,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): src="{0}".format(uss_dest), dest="{0}({1})".format(dest_lib_aliases, pgm_mem), remote_src=True, - executable=True, + is_executable=True, aliases=True ) @@ -3712,10 +3862,14 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None # 
check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -3844,7 +3998,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib), remote_src=True, - executable=True, + is_executable=True, aliases=False, dest_data_set={ 'type': "library", @@ -3860,7 +4014,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib_aliases), remote_src=True, - executable=True, + is_executable=True, aliases=True, dest_data_set={ 'type': "library", @@ -3878,7 +4032,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib), remote_src=True, - executable=True, + is_executable=True, aliases=False ) # copy src loadlib to dest library pds w aliases @@ -3886,7 +4040,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib_aliases), remote_src=True, - executable=True, + is_executable=True, aliases=True ) @@ -3894,11 +4048,15 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib_aliases) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -4055,7 +4213,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): copy_res = hosts.all.zos_copy( src=source_path, dest="{0}".format(dest_lib), - executable=True, + is_executable=True, 
aliases=False, dest_data_set={ 'type': "pdse", @@ -4071,7 +4229,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): copy_res = hosts.all.zos_copy( src=source_path, dest="{0}".format(dest_lib), - executable=True, + is_executable=True, aliases=False ) @@ -4079,6 +4237,8 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -4193,12 +4353,14 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): src="{0}".format(src_lib), dest="{0}".format(uss_dir_path), remote_src=True, - executable=True, + is_executable=True, ) for result in copy_res_uss.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(uss_dir_path) + assert result.get("dest_created") is not None + assert result.get("src") is not None # inspect USS dir contents verify_exe_uss_ls = hosts.all.shell( @@ -4229,7 +4391,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): src="{0}/{1}".format(uss_dir_path, src_lib.upper()), dest="{0}".format(dest_lib), remote_src=True, - executable=True, + is_executable=True, aliases=False ) # copy USS dir to dest library pds w aliases @@ -4237,7 +4399,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): src="{0}/{1}".format(uss_dir_path, src_lib.upper()), dest="{0}".format(dest_lib_aliases), remote_src=True, - executable=True, + is_executable=True, aliases=True ) @@ -4245,11 +4407,16 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib) + assert 
result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): + print(result) assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib_aliases) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -4313,13 +4480,16 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): src=f"{c_dir}/hello_world", dest=dest_uss, remote_src=True, - executable=True, - force=True + is_executable=True, + replace=True ) verify_exe_dst = hosts.all.shell(cmd=f"{c_dir}/hello_world_2") for result in copy_uss_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for res in verify_exe_dst.contacted.values(): assert res.get("rc") == 0 stdout = res.get("stdout") @@ -4357,8 +4527,8 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): src=f"{c_dir}/hello_world", dest="{0}({1})".format(dest, member), remote_src=True, - executable=True, - force=True + is_executable=True, + replace=True ) cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" exec_res = hosts.all.shell( @@ -4367,6 +4537,9 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): for result in copy_uss_to_mvs_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for res in exec_res.contacted.values(): assert res.get("rc") == 0 stdout = res.get("stdout") @@ -4410,6 +4583,8 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") 
is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 @@ -4446,6 +4621,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy = hosts.all.shell( cmd="mls {0}".format(dest), @@ -4493,6 +4670,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_member + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy = hosts.all.shell( cmd="mls {0}".format(dest), @@ -4537,6 +4716,8 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -4550,10 +4731,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="pds", replace=False), + dict(ds_type="pds", replace=True), + dict(ds_type="pdse", replace=False), + dict(ds_type="pdse", replace=True) ]) def test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4569,17 +4750,19 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): executable=SHELL_EXECUTABLE ) - 
copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True, force=args["force"]) + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True, replace=args["replace"]) stat_res = hosts.all.stat(path=dest) verify_copy = hosts.all.shell( cmd="head {0}".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if args["force"]: + if args["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -4587,7 +4770,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 - if args["force"]: + if args["replace"]: assert result.get("stdout") != "" finally: hosts.all.zos_data_set(name=data_set, state="absent") @@ -4631,6 +4814,8 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is True @@ -4677,6 +4862,8 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -4714,6 +4901,7 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): assert result.get("changed") is True assert 
result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -4725,10 +4913,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="pds", replace=False), + dict(type="pds", replace=True), + dict(type="pdse", replace=False), + dict(type="pdse", replace=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4746,22 +4934,24 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): executable=SHELL_EXECUTABLE ) - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=args["replace"], remote_src=True) verify_copy = hosts.all.shell( cmd="head \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if args["force"]: + if args["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False for result in verify_copy.contacted.values(): assert result.get("rc") == 0 - if args["force"]: + if args["replace"]: assert result.get("stdout") != "" finally: hosts.all.zos_data_set(name=src_ds, state="absent") @@ -4804,6 +4994,8 @@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + 
assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -4831,14 +5023,16 @@ def test_backup_pds(ansible_zos_module, args): if args["backup"]: backup_name = get_tmp_ds_name() - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True, backup_name=backup_name) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None result_backup_name = result.get("backup_name") assert result_backup_name is not None @@ -4893,6 +5087,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ assert cp.get('msg') is None assert cp.get('changed') is True assert cp.get('dest') == dest + assert cp.get("dest_created") is not None + assert cp.get("src") is not None check_vol = hosts.all.shell( cmd="tsocmd \"LISTDS '{0}'\"".format(dest), @@ -4922,6 +5118,8 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_ds + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("dd_names") is not None dd_names = result.get("dd_names") @@ -4938,8 +5136,8 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): ) @pytest.mark.vsam -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def 
test_copy_ksds_to_existing_ksds(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest_ds = get_tmp_ds_name() @@ -4948,14 +5146,16 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0) create_vsam_data_set(hosts, dest_ds, "ksds", add_data=True, key_length=12, key_offset=0) - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, replace=replace) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_ds + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -4986,13 +5186,16 @@ def test_backup_ksds(ansible_zos_module, backup): if backup: backup_name = get_tmp_ds_name() - copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup_name, remote_src=True, force=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup_name, remote_src=True, replace=True) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, remote_src=True, force=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, remote_src=True, replace=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None result_backup_name = result.get("backup_name") assert result_backup_name is not None @@ -5042,6 +5245,12 @@ def test_copy_ksds_to_volume(ansible_zos_module, volumes_on_systems): ) verify_copy = 
get_listcat_information(hosts, dest_ds, "ksds") + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("dd_names") is not None dd_names = result.get("dd_names") @@ -5098,6 +5307,8 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): # The tsocmd returns 5 lines like this: # USER.TEST.DEST @@ -5136,6 +5347,9 @@ def test_ensure_tmp_cleanup(ansible_zos_module): for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None stat_dir = hosts.all.shell( cmd="ls", @@ -5153,32 +5367,36 @@ def test_ensure_tmp_cleanup(ansible_zos_module): @pytest.mark.vsam -@pytest.mark.parametrize("force", [False, True]) -def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option(ansible_zos_module, replace): hosts = ansible_zos_module dest = get_tmp_ds_name() src_file = "/etc/profile" tmphlq = "TMPHLQ" try: hosts.all.zos_data_set(name=dest, type="seq", state="present") - copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) - copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) + copy_result = hosts.all.zos_copy(src=src_file, 
dest=dest, remote_src=True, replace=replace) + copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, replace=replace) verify_copy = None - if force: + if replace: verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), executable=SHELL_EXECUTABLE, ) for cp_res in copy_result.contacted.values(): - if force: + if replace: assert cp_res.get("msg") is None assert cp_res.get("backup_name")[:6] == tmphlq + assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None else: assert cp_res.get("msg") is not None assert cp_res.get("changed") is False - if force: + if replace: for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -5188,11 +5406,11 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( @pytest.mark.parametrize("options", [ dict(src="/etc/profile", - force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), - dict(src="/etc/profile", force=True, + replace=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), + dict(src="/etc/profile", replace=True, is_remote=False, verbosity="-vvvv", verbosity_level=4), dict(src="/etc/profile", - force=True, is_remote=False, verbosity="", verbosity_level=0), + replace=True, is_remote=False, verbosity="", verbosity_level=0), ]) def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): """Test the display verbosity, ensure it matches the verbosity_level. 
@@ -5271,6 +5489,8 @@ def test_copy_seq_gds_to_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_data_set + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5308,6 +5528,8 @@ def test_copy_data_set_to_new_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5343,6 +5565,8 @@ def test_copy_uss_file_to_new_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5381,6 +5605,8 @@ def test_copy_pds_to_new_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5408,7 +5634,7 @@ def test_copy_data_set_to_previous_gds(ansible_zos_module): src=src_data_set, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) @@ -5422,6 +5648,8 @@ def test_copy_data_set_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + 
assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5446,7 +5674,7 @@ def test_copy_uss_file_to_previous_gds(ansible_zos_module): src=src_file, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) @@ -5460,6 +5688,8 @@ def test_copy_uss_file_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5487,7 +5717,7 @@ def test_copy_pds_member_to_previous_gds(ansible_zos_module): src=member_src, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) @@ -5501,6 +5731,8 @@ def test_copy_pds_member_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5528,7 +5760,7 @@ def test_copy_pds_to_previous_gds(ansible_zos_module): src=src_data_set, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""mls "{dest_data_set}(0)" """) @@ -5542,6 +5774,8 @@ def test_copy_pds_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for 
v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5569,7 +5803,7 @@ def test_copy_data_set_to_previous_gds_no_force(ansible_zos_module): src=src_data_set, dest=f"{dest_data_set}(0)", remote_src=True, - force=False + replace=False ) for cp_res in copy_results.contacted.values(): @@ -5602,7 +5836,7 @@ def test_copy_data_set_to_previous_non_existent_gds(ansible_zos_module, generati # Copying to a previous generation that doesn't exist. dest=f"{dest_data_set}({generation})", remote_src=True, - force=True + replace=True ) for cp_res in copy_results.contacted.values(): @@ -5640,6 +5874,9 @@ def test_copy_gdg_to_uss_dir(ansible_zos_module): for cp_res in copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_res in verify_dest.contacted.values(): assert v_res.get("rc") == 0 assert len(v_res.get("stdout_lines", [])) > 0 @@ -5676,6 +5913,9 @@ def test_copy_gdg_to_gdg(ansible_zos_module, new_gdg): for cp_res in copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.shell(cmd=f"""drm "{src_data_set}(-1)" """) hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) @@ -5714,6 +5954,9 @@ def test_identical_gdg_copy(ansible_zos_module): for result in copy_results.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None finally: src_gdg_result = hosts.all.shell(cmd=f"dls {src_data_set}.*") src_gdgs = [] @@ -5767,6 +6010,9 @@ def test_copy_gdg_to_gdg_dest_attributes(ansible_zos_module): for cp_res in 
copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.shell(cmd=f"""drm "{src_data_set}(-1)" """) hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) @@ -5796,7 +6042,7 @@ def test_backup_gds(ansible_zos_module): src=src_data_set, dest=dest_data_set, remote_src=True, - force=True, + replace=True, backup=True, backup_name=f"{backup_data_set}(+1)", ) @@ -5808,6 +6054,9 @@ def test_backup_gds(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in backup_check.contacted.values(): assert result.get("rc") == 0 @@ -5841,7 +6090,7 @@ def test_backup_gds_invalid_generation(ansible_zos_module): src=src_data_set, dest=dest_data_set, remote_src=True, - force=True, + replace=True, backup=True, backup_name=f"{backup_data_set}(0)", ) @@ -5878,6 +6127,9 @@ def test_copy_to_dataset_with_special_symbols(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=src_data_set, state="absent") @@ -5987,6 +6239,10 @@ def test_copy_data_set_seq_with_aliases(ansible_zos_module, volumes_on_systems): for result in zos_copy_result.contacted.values(): assert result.get('changed') is True assert result.get('failed', False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy = hosts.all.shell( cmd="cat 
\"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE, @@ -6027,6 +6283,9 @@ def test_copy_pds_to_pds_using_dest_alias(ansible_zos_module): for cp_res in copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None verify_dest = hosts.all.shell( cmd=f"""dcat "{dest_pds}(MEMBER)" """, @@ -6123,7 +6382,7 @@ def test_copy_pdse_loadlib_to_pdse_loadlib_using_aliases(ansible_zos_module): src="{0}".format(src_lib_aliases), dest="{0}".format(dest_lib_aliases), remote_src=True, - executable=True, + is_executable=True, aliases=True, dest_data_set={ 'type': "library", @@ -6139,6 +6398,8 @@ def test_copy_pdse_loadlib_to_pdse_loadlib_using_aliases(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib_aliases) + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy_mls_aliases = hosts.all.shell( cmd="mls {0}".format(dest_lib), @@ -6201,6 +6462,7 @@ def test_copy_asa_file_to_asa_sequential_with_pound(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -6254,6 +6516,7 @@ def test_copy_seq_data_set_to_seq_asa_with_pounds(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -6344,7 +6607,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo src="{0}({1})".format(src_lib, pgm_mem), 
dest="{0}({1})".format(dest_lib, dest_pgm_mem), remote_src=True, - executable=True, + is_executable=True, aliases=False ) # zos_copy w an executables and its alias: @@ -6352,7 +6615,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib_aliases, dest_pgm_mem), remote_src=True, - executable=True, + is_executable=True, aliases=True ) @@ -6360,11 +6623,15 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib, dest_pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, dest_pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -6400,4 +6667,3 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo hosts.all.zos_data_set(name=src_lib, state="absent") hosts.all.zos_data_set(name=dest_lib, state="absent") hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") - \ No newline at end of file From f9e9e65f06b41e3f367e7ed1659717e86c3910de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 12 Aug 2025 12:45:10 -0500 Subject: [PATCH 11/73] [Enabler][zos_operator] Update module interface (#2230) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Test * Fix test xos operator * Fix tab * Fix tab * Fix tab * Fix tab * Fix tab * Fix tab * Fix tab * Fix tab * Add fragment * Fix sanity * Update 
changelogs/fragments/2230_zos_operator_interface_update.yml Co-authored-by: Alex Moreno * Update changelogs/fragments/2230_zos_operator_interface_update.yml Co-authored-by: Alex Moreno * Update plugins/modules/zos_operator.py Co-authored-by: Alex Moreno * Update plugins/modules/zos_operator.py Co-authored-by: Alex Moreno * Update plugins/modules/zos_operator.py Co-authored-by: Alex Moreno * Fix validation * Modify operation test --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Alex Moreno --- .../2230_zos_operator_interface_update.yml | 4 + plugins/modules/zos_operator.py | 69 +++++++++--- .../modules/test_zos_operator_func.py | 103 +++++++++++++++--- 3 files changed, 140 insertions(+), 36 deletions(-) create mode 100644 changelogs/fragments/2230_zos_operator_interface_update.yml diff --git a/changelogs/fragments/2230_zos_operator_interface_update.yml b/changelogs/fragments/2230_zos_operator_interface_update.yml new file mode 100644 index 0000000000..50c1e6c1e5 --- /dev/null +++ b/changelogs/fragments/2230_zos_operator_interface_update.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_operator - Option ``wait_time_s`` is being deprecated in favor of ``wait_time``. New option ``time_unit`` is being added to select + seconds or centiseconds. New return value ``time_unit`` is being added. Return value ``wait_time_s`` is being deprecated in favor of ``wait_time``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2230). \ No newline at end of file diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index ab529cf335..eb8bc34fad 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -50,17 +50,26 @@ type: bool required: false default: false - wait_time_s: + wait_time: description: - Set maximum time in seconds to wait for the commands to execute. - When set to 0, the system default is used. - This option is helpful on a busy system requiring more time to execute commands. 
- Setting I(wait) can instruct if execution should wait the - full I(wait_time_s). + full I(wait_time). type: int required: false default: 1 + time_unit: + description: + - Set the C(wait_time) unit of time, which can be C(s) (seconds) or C(cs) (centiseconds). + type: str + required: false + default: s + choices: + - s + - cs case_sensitive: description: - If C(true), the command will not be converted to uppercase before @@ -103,11 +112,17 @@ - name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' - wait_time_s: 5 + wait_time: 5 - name: Display the system symbols and associated substitution texts. zos_operator: cmd: 'D SYMBOLS' + +- name: Execute an operator command to show device status and allocation wait 10 centiseconds. + zos_operator: + cmd: 'd u' + wait_time : 10 + time_unit : 'cs' """ RETURN = r""" @@ -125,16 +140,22 @@ sample: d u,all elapsed: description: - The number of seconds that elapsed waiting for the command to complete. + The number of seconds or centiseconds that elapsed waiting for the command to complete. returned: always type: float sample: 51.53 -wait_time_s: +wait_time: description: - The maximum time in seconds to wait for the commands to execute. + The maximum time in the time_unit set to wait for the commands to execute. returned: always type: int sample: 5 +time_unit: + description: + The time unit set for wait_time. + returned: always + type: str + sample: s content: description: The resulting text from the command submitted. @@ -200,7 +221,7 @@ opercmd = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): +def execute_command(operator_cmd, time_unit, timeout=1, preserve=False, *args, **kwargs): """ Executes an operator command. @@ -208,6 +229,8 @@ def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): ---------- operator_cmd : str Command to execute. 
+ time_unit : str + Unit of time to wait of execution of the command. timeout : int Time until it stops whether it finished or not. preserve : bool @@ -223,15 +246,20 @@ def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): Return code, standard output, standard error and time elapsed from start to finish. """ # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: - timeout_c = 100 * timeout_s + if time_unit == "s": + timeout = 100 * timeout start = timer() - response = opercmd.execute(operator_cmd, timeout=timeout_c, preserve=preserve, *args, **kwargs) + response = opercmd.execute(operator_cmd, timeout=timeout, preserve=preserve, *args, **kwargs) end = timer() rc = response.rc stdout = response.stdout_response stderr = response.stderr_response - elapsed = round(end - start, 2) + if time_unit == "cs": + elapsed = round((end - start) * 100, 2) + else: + elapsed = round(end - start, 2) + return rc, stdout, stderr, elapsed @@ -252,7 +280,8 @@ def run_module(): module_args = dict( cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), - wait_time_s=dict(type="int", required=False, default=1), + wait_time=dict(type="int", required=False, default=1), + time_unit=dict(type="str", required=False, choices=["s", "cs"], default="s"), case_sensitive=dict(type="bool", required=False, default=False), ) @@ -294,7 +323,8 @@ def run_module(): # call is returned from run_operator_command, specifying what was run. # result["cmd"] = new_params.get("cmd") result["cmd"] = rc_message.get("call") - result["wait_time_s"] = new_params.get("wait_time_s") + result["wait_time"] = new_params.get("wait_time") + result["time_unit"] = new_params.get("time_unit") result["changed"] = False # rc=0, something succeeded (the calling script ran), @@ -309,7 +339,8 @@ def run_module(): module.fail_json(msg=("A non-zero return code was received : {0}. 
Review the response for more details.").format(result["rc"]), cmd=result["cmd"], elapsed_time=result["elapsed"], - wait_time_s=result["wait_time_s"], + wait_time=result["wait_time"], + time_unit=result["time_unit"], stderr=str(error) if error is not None else result["content"], stderr_lines=str(error).splitlines() if error is not None else result["content"], changed=result["changed"],) @@ -338,9 +369,10 @@ def parse_params(params): """ arg_defs = dict( cmd=dict(arg_type="str", required=True), - verbose=dict(arg_type="bool", required=False), - wait_time_s=dict(arg_type="int", required=False), - case_sensitive=dict(arg_type="bool", required=False), + verbose=dict(arg_type="bool", required=False, default=False), + wait_time=dict(arg_type="int", required=False, default=1), + time_unit=dict(type="str", required=False, choices=["s", "cs"], default="s"), + case_sensitive=dict(arg_type="bool", required=False, default=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -369,7 +401,8 @@ def run_operator_command(params): kwargs.update({"verbose": True}) kwargs.update({"debug": True}) - wait_s = params.get("wait_time_s") + wait_time = params.get("wait_time") + time_unit = params.get("time_unit") cmdtxt = params.get("cmd") preserve = params.get("case_sensitive") @@ -381,7 +414,7 @@ def run_operator_command(params): kwargs.update({"wait": True}) args = [] - rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, preserve=preserve, *args, **kwargs) + rc, stdout, stderr, elapsed = execute_command(cmdtxt, time_unit=time_unit, timeout=wait_time, preserve=preserve, *args, **kwargs) if rc > 0: message = "\nOut: {0}\nErr: {1}\nRan: {2}".format(stdout, stderr, cmdtxt) diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index 99f4ef0351..1fcb349db8 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ 
-45,7 +45,7 @@ - name: zos_operator zos_operator: cmd: 'd a,all' - wait_time_s: 3 + wait_time: 3 verbose: true register: output @@ -77,15 +77,29 @@ def test_zos_operator_various_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd=command) for result in results.contacted.values(): - assert result["rc"] == expected_rc + print(result) + assert result.get("rc") == expected_rc assert result.get("changed") is changed + assert result.get("msg", False) is False + assert result.get("cmd") == command + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None def test_zos_operator_invalid_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="invalid,command", verbose=False) for result in results.contacted.values(): + print(result) assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None def test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module): @@ -93,6 +107,12 @@ def test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module) results = hosts.all.zos_operator(cmd="DUMP COMM=('ERROR DUMP')", verbose=False) for result in results.contacted.values(): assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None transparency = False if any('DUMP COMMAND' in str for str in result.get("content")): transparency = True @@ -103,8 +123,13 @@ def test_zos_operator_positive_path(ansible_zos_module): hosts = ansible_zos_module 
results = hosts.all.zos_operator(cmd="d u,all", verbose=False) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None @@ -112,8 +137,13 @@ def test_zos_operator_positive_path_verbose(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="d u,all", verbose=True) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None # Traverse the content list for a known verbose keyword and track state is_verbose = False @@ -127,45 +157,55 @@ def test_zos_operator_positive_verbose_with_full_delay(ansible_zos_module): hosts = ansible_zos_module wait_time = 10 results = hosts.all.zos_operator( - cmd="RO *ALL,LOG 'dummy syslog message'", verbose=True, wait_time_s=wait_time + cmd="RO *ALL,LOG 'dummy syslog message'", verbose=True, wait_time=wait_time ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True - assert result.get("content") is not None + assert result.get("msg", False) is False + assert result.get("cmd") is not None assert result.get("elapsed") > wait_time + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): hosts = ansible_zos_module - wait_time_s=10 + 
wait_time=10 results = hosts.all.zos_operator( - cmd="d u,all", verbose=True, wait_time_s=wait_time_s + cmd="d u,all", verbose=True, wait_time=wait_time ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") <= (2 * wait_time) + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None - # Account for slower network - assert result.get('elapsed') <= (2 * wait_time_s) def test_zos_operator_positive_verbose_blocking(ansible_zos_module): hosts = ansible_zos_module if is_zoau_version_higher_than(hosts,"1.2.4.5"): - wait_time_s=5 + wait_time=5 results = hosts.all.zos_operator( - cmd="d u,all", verbose=True, wait_time_s=wait_time_s + cmd="d u,all", verbose=True, wait_time=wait_time ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") >= wait_time + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None - # Account for slower network - assert result.get('elapsed') >= wait_time_s def test_zos_operator_positive_path_preserve_case(ansible_zos_module): @@ -178,8 +218,13 @@ def test_zos_operator_positive_path_preserve_case(ansible_zos_module): ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("wait_time") is not None + assert result.get("elapsed") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None # Making sure the output from 
opercmd logged the command # exactly as it was written. @@ -193,12 +238,34 @@ def test_response_come_back_complete(ansible_zos_module): res = {} res["stdout"] = [] for result in results.contacted.values(): + assert result.get("rc") == 0 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("wait_time") is not None + assert result.get("elapsed") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None stdout = result.get('content') # HASP646 Only appears in the last line that before did not appears last_line = len(stdout) assert "HASP646" in stdout[last_line - 1] +def test_operator_sentiseconds(ansible_zos_module): + hosts = ansible_zos_module + results = hosts.all.zos_operator(cmd="d a", time_unit="cs", wait_time=100) + for result in results.contacted.values(): + assert result.get("rc") == 0 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "cs" + assert result.get("content") is not None + + def test_zos_operator_parallel_terminal(get_config): path = get_config with open(path, 'r') as file: From 241fabf208103a7d825c02ae79c304e7100c2364 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 13 Aug 2025 00:21:27 +0530 Subject: [PATCH 12/73] Updating testcases --- plugins/module_utils/better_arg_parser.py | 4 +- plugins/modules/zos_started_task.py | 151 ++++++++++-------- .../modules/test_zos_started_task_func.py | 61 ++----- 3 files changed, 103 insertions(+), 113 deletions(-) diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index ab0fde9292..d49cb46458 100644 --- a/plugins/module_utils/better_arg_parser.py +++ 
b/plugins/module_utils/better_arg_parser.py @@ -330,7 +330,7 @@ def _bool_type(self, contents, resolve_dependencies): if not isinstance(contents, bool): raise ValueError('Invalid argument "{0}" for type "bool".'.format(contents)) return contents - + def _member_name_type(self, contents, resolve_dependencies): """Resolver for data_set type arguments. @@ -362,7 +362,7 @@ def _member_name_type(self, contents, resolve_dependencies): 'Invalid argument "{0}" for type "member_name".'.format(contents) ) return str(contents) - + def _identifier_name_type(self, contents, resolve_dependencies): """Resolver for data_set type arguments. diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 35a1536145..988ba937ac 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2022, 2025 +# Copyright (c) IBM Corporation 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. + from __future__ import (absolute_import, division, print_function) -import traceback __metaclass__ = type @@ -24,7 +24,7 @@ - "Ravella Surendra Babu (@surendra.ravella582)" short_description: Perform operations on started tasks. description: - - Start, display, modify, cancel, force and stop a started task + - start, display, modify, cancel, force and stop a started task options: asid: @@ -39,20 +39,22 @@ type: str device_number: description: - - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. + - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. 
A slash (/) must precede a 4-digit number but is not before a 3-digit number. required: false type: str - identifier: + identifier_name: description: - - I(device_number) is the name that identifies the task to be started. This name can be up to 8 characters long. + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. The first character must be alphabetical. required: false type: str + aliases: + - identifier job_account: description: - - I(job_account) specifies accounting data in the JCL JOB statement for the started task. - If the source JCL was a job and has already accounting data, the value that is specified on this parameter + - I(job_account) specifies accounting data in the JCL JOB statement for the started task. + If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. required: false type: str @@ -62,26 +64,32 @@ then member_name is used as job_name. required: false type: str + aliases: + - job + - task + - task_name keyword_parameters: description: - - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. - The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than + - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. + The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. required: false type: str member_name: description: - - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL + - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. 
The member can be either a job or a cataloged procedure. required: false type: str + aliases: + - member operation: description: - The started task operation which needs to be performed. - > If I(operation=start) and the data set does not exist on the managed node, no action taken, module completes successfully with I(changed=False). - required: false + required: true type: str choices: - start @@ -97,12 +105,17 @@ type: str reus_asid: description: - - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, - a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified + - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, + a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + required: false + type: str + choices: + - 'YES' + - 'NO' subsystem_name: description: - - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, + - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. 
required: false type: str @@ -118,11 +131,13 @@ member: "PROCAPP" operation: "start" """ + RETURN = r""" """ from ansible.module_utils.basic import AnsibleModule +import traceback from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser @@ -136,15 +151,15 @@ try: from zoautil_py import opercmd -except Exception: - datasets = ZOAUImportError(traceback.format_exc()) - gdgs = ZOAUImportError(traceback.format_exc()) - -try: - from zoautil_py import exceptions as zoau_exceptions except ImportError: zoau_exceptions = ZOAUImportError(traceback.format_exc()) +# try: +# from zoautil_py import exceptions as zoau_exceptions +# except ImportError: +# zoau_exceptions = ZOAUImportError(traceback.format_exc()) + + def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): """Execute operator command. @@ -173,22 +188,24 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): stderr = response.stderr_response return rc, stdout, stderr + def prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters): - cmd = 'S '+member + cmd = 'S ' + member if identifier: - cmd = cmd + "." + identifier + "," + device + "," + volume_serial + "," + parameters + cmd = cmd + "." + identifier + "," + device + "," + volume_serial + "," + parameters if job_name: - cmd = cmd + ",jobname=" + job_name + cmd = cmd + ",jobname=" + job_name if job_account: - cmd = cmd + ",jobacct=" + job_account + cmd = cmd + ",jobacct=" + job_account if subsystem_name: - cmd = cmd + ",SUB=" + subsystem_name + cmd = cmd + ",SUB=" + subsystem_name if reus_asid: - cmd = cmd + ",REUSASID=" + reus_asid + cmd = cmd + ",REUSASID=" + reus_asid if keyword_parameters: - cmd = cmd + "," + keyword_parameters + cmd = cmd + "," + keyword_parameters return cmd + def run_module(): """Initialize the module. 
@@ -210,7 +227,7 @@ def run_module(): 'aliases': ['member'] }, 'identifier_name': { - 'arg_type': 'str', + 'type': 'str', 'required': False, 'aliases': ['identifier'] }, @@ -219,7 +236,7 @@ def run_module(): 'required': False, 'aliases': ['job', 'task_name', 'task'] }, - 'job_account': { #55 chars + 'job_account': { 'type': 'str', 'required': False }, @@ -227,7 +244,7 @@ def run_module(): 'type': 'str', 'required': False }, - 'device_number': { #A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. + 'device_number': { # A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. 'type': 'str', 'required': False }, @@ -235,22 +252,23 @@ def run_module(): 'type': 'str', 'required': False }, - 'subsystem_name': { #The name must be 1 - 4 characters + 'subsystem_name': { # The name must be 1 - 4 characters 'type': 'str', 'required': False }, 'reus_asid': { - 'type': 'bool', + 'type': 'str', 'required': False, - 'choices': ['yes', 'no'] + 'choices': ['YES', 'NO'] }, 'parameters': { 'type': 'str', 'required': False }, - 'keyword_parameters': { #The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. 
+ 'keyword_parameters': { 'type': 'str', - 'required': False + 'required': False, + 'no_log': False }, 'asid': { 'type': 'str', @@ -258,8 +276,8 @@ def run_module(): } }, mutually_exclusive=[ - ['job_name', 'identifier'], - ['device_name', 'device_type'] + ['job_name', 'identifier_name'], + ['device_number', 'device_type'] ], supports_check_mode=True ) @@ -352,19 +370,19 @@ def run_module(): module.fail_json( msg="job_account value should not exceed 55 characters.", changed=False - ) + ) if device_number: devnum_len = len(device_number) - if devnum_len not in (3, 5) or ( devnum_len == 5 and not device_number.startswith("/")): + if devnum_len not in (3, 5) or (devnum_len == 5 and not device_number.startswith("/")): module.fail_json( msg="Invalid device_number.", changed=False - ) + ) if subsystem_name and len(job_account) > 4: module.fail_json( msg="The subsystem_name must be 1 - 4 characters.", changed=False - ) + ) # keywaord arguments validation..... wait_s = 5 @@ -388,31 +406,31 @@ def run_module(): started_task_name = started_task_name + "." 
+ identifier else: module.fail_json( - msg="one of job_name, member_name or identifier is needed but all are missing.", - changed=False + msg="one of job_name, member_name or identifier is needed but all are missing.", + changed=False ) if operation == 'start': - ##member name is mandatory + # member name is mandatory if member is None or member.strip() == "": module.fail_json( - msg="member_name is missing which is mandatory.", - changed=False - ) + msg="member_name is missing which is mandatory.", + changed=False + ) cmd = prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters) elif operation == 'display': - cmd = 'd a,'+started_task_name + cmd = 'd a,' + started_task_name elif operation == 'stop': - cmd = 'p '+started_task_name + cmd = 'p ' + started_task_name elif operation == 'cancel': - cmd = 'c '+started_task_name + cmd = 'c ' + started_task_name if asid: - cmd = cmd+',a='+asid + cmd = cmd + ',a=' + asid elif operation == 'force': - cmd = 'force '+started_task_name + cmd = 'force ' + started_task_name if asid: - cmd = cmd+',a='+asid + cmd = cmd + ',a=' + asid elif operation == 'modify': - cmd = 'f '+started_task_name+','+parameters + cmd = 'f ' + started_task_name + ',' + parameters changed = False stdout = "" stderr = "" @@ -422,31 +440,30 @@ def run_module(): stdout = out stderr = err if err == "" or err is None: - stderr = out + stderr = out else: changed = True stdout = out stderr = err - result = dict() if module.check_mode: module.exit_json(**result) result = dict( - changed=changed, - cmd=cmd, - remote_cmd=cmd, - rc=rc, - stdout=stdout, - stderr=stderr, - stdout_lines=stdout.split('\n'), - stderr_lines=stderr.split('\n'), - ) + changed=changed, + cmd=cmd, + remote_cmd=cmd, + rc=rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) module.exit_json(**result) if __name__ == '__main__': - run_module() + 
run_module() diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index e1a8165b73..5529ff4f62 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -96,27 +96,14 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) ) - hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True - ) - hosts.all.shell( - cmd="dcp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(temp_path, data_set_name) + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) ) - copy_result = hosts.all.zos_copy( - src="{0}(SAMPLE)".format(data_set_name), - dest=PROC_PDS, - remote_src=True, - force=True + hosts.all.shell( + cmd="dcp {0} \"//'{1}(SAMPLE)'\"".format(data_set_name, PROC_PDS) ) - for cp_res in copy_result.contacted.values(): - print(cp_res) - assert cp_res.get("msg") is None - assert cp_res.get("changed") is True - assert cp_res.get("dest") == PROC_PDS - start_results = hosts.all.zos_started_task( operation="start", member="SAMPLE" @@ -176,12 +163,11 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): finally: hosts.all.file(path=temp_path, state="absent") - hosts.all.zos_data_set(name=data_set_name, state="absent") - hosts.all.zos_data_set( - name=f"{PROC_PDS}(SAMPLE)", - state="absent", - type="member", - force=True + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) ) def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): @@ -190,31 +176,19 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): data_set_name = get_tmp_ds_name() temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") + hosts.all.shell( cmd="echo {0} > 
{1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) ) - hosts.all.zos_data_set( - name=data_set_name, state="present", type="pds", replace=True - ) - hosts.all.shell( - cmd="dcp {0}/SAMPLE \"//'{1}(SAMPLE)'\"".format(temp_path, data_set_name) + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) ) - copy_result = hosts.all.zos_copy( - src="{0}(SAMPLE)".format(data_set_name), - dest=PROC_PDS, - remote_src=True, - force=True + hosts.all.shell( + cmd="dcp {0} \"//'{1}(SAMPLE)'\"".format(data_set_name, PROC_PDS) ) - for cp_res in copy_result.contacted.values(): - print(cp_res) - assert cp_res.get("msg") is None - assert cp_res.get("changed") is True - assert cp_res.get("dest") == PROC_PDS - start_results = hosts.all.zos_started_task( operation="start", member="SAMPLE", @@ -240,10 +214,9 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): finally: hosts.all.file(path=temp_path, state="absent") - hosts.all.zos_data_set(name=data_set_name, state="absent") - hosts.all.zos_data_set( - name=f"{PROC_PDS}(SAMPLE)", - state="absent", - type="member", - force=True + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) ) \ No newline at end of file From 9e6a4ebb0cd181cc9cdd47021a14196541c97fee Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Thu, 14 Aug 2025 09:22:24 -0600 Subject: [PATCH 13/73] [Bug] [zos_backup_restore] Comment test case and fixes byte removal by ZOAU (#2238) * [Bug] [zos_backup_restore] Comment test case and fixes byte removal by ZOAU (#2236) * Commented backup test * Default space type value to m * Added changelog * Updated docs * Update 2236-space-type-default-zos_backup_restore.yml * Updated changelogs --- ...-space-type-default-zos_backup_restore.yml | 4 + plugins/modules/zos_backup_restore.py | 5 +- .../modules/test_zos_backup_restore.py | 87 ++++++++++--------- 3 files changed, 51 insertions(+), 45 deletions(-) create mode 100644 
changelogs/fragments/2238-space-type-default-zos_backup_restore.yml diff --git a/changelogs/fragments/2238-space-type-default-zos_backup_restore.yml b/changelogs/fragments/2238-space-type-default-zos_backup_restore.yml new file mode 100644 index 0000000000..af8e226e28 --- /dev/null +++ b/changelogs/fragments/2238-space-type-default-zos_backup_restore.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_backup_restore - Module documentation stated that default ``space_type`` for a backup was ``m`` but module would use bytes instead. + Fix now uses the correct default space type. + (https://github.com/ansible-collections/ibm_zos_core/pull/2238). diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 7317626897..774543ee4d 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -174,6 +174,7 @@ - Valid units of size are C(k), C(m), C(g), C(cyl), and C(trk). - When I(full_volume=True), I(space_type) defaults to C(g), otherwise default is C(m) type: str + default: m choices: - k - m @@ -412,7 +413,7 @@ def main(): ), ), space=dict(type="int", required=False, aliases=["size"]), - space_type=dict(type="str", required=False, aliases=["unit"], choices=["k", "m", "g", "cyl", "trk"]), + space_type=dict(type="str", required=False, aliases=["unit"], choices=["k", "m", "g", "cyl", "trk"], default="m"), volume=dict(type="str", required=False), full_volume=dict(type="bool", default=False), temp_volume=dict(type="str", required=False, aliases=["dest_volume"]), @@ -431,7 +432,7 @@ def main(): operation = params.get("operation") data_sets = params.get("data_sets", {}) space = params.get("space") - space_type = params.get("space_type") + space_type = params.get("space_type", "m") volume = params.get("volume") full_volume = params.get("full_volume") temp_volume = params.get("temp_volume") diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 
4bb523b894..9911b86a92 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -254,49 +254,50 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) delete_data_set_or_file(hosts, backup_name) delete_remnants(hosts) - -@pytest.mark.parametrize( - "backup_name,overwrite", - [ - ("DATA_SET", False), - ("DATA_SET", True), - ("UNIX", False), - ("UNIX", True), - ], -) -def test_backup_of_data_set_when_backup_dest_exists( - ansible_zos_module, backup_name, overwrite -): - hosts = ansible_zos_module - data_set_name = get_tmp_ds_name() - if backup_name == "DATA_SET": - backup_name = get_tmp_ds_name(1,1) - else: - backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') - try: - create_data_set_or_file_with_contents(hosts, backup_name, DATA_SET_CONTENTS) - assert_data_set_or_file_exists(hosts, backup_name) - create_sequential_data_set_with_contents( - hosts, data_set_name, DATA_SET_CONTENTS - ) - results = hosts.all.zos_backup_restore( - operation="backup", - data_sets=dict(include=data_set_name), - backup_name=backup_name, - overwrite=overwrite, - ) - if overwrite: - assert_module_did_not_fail(results) - for result in results.contacted.values(): - assert result.get("backup_name") == backup_name, \ - f"Backup name '{backup_name}' not found in output" - else: - assert_module_failed(results) - assert_data_set_or_file_exists(hosts, backup_name) - finally: - delete_data_set_or_file(hosts, data_set_name) - delete_data_set_or_file(hosts, backup_name) - delete_remnants(hosts) +# Commenting these tests because of issue https://github.com/ansible-collections/ibm_zos_core/issues/2235 +# which likely is a zoau bug that needs to be fixed. 
+# @pytest.mark.parametrize( +# "backup_name,overwrite", +# [ +# ("DATA_SET", False), +# ("DATA_SET", True), +# ("UNIX", False), +# ("UNIX", True), +# ], +# ) +# def test_backup_of_data_set_when_backup_dest_exists( +# ansible_zos_module, backup_name, overwrite +# ): +# hosts = ansible_zos_module +# data_set_name = get_tmp_ds_name() +# if backup_name == "DATA_SET": +# backup_name = get_tmp_ds_name(1,1) +# else: +# backup_name = get_random_file_name(dir=TMP_DIRECTORY, prefix='.dzp') +# try: +# create_data_set_or_file_with_contents(hosts, backup_name, DATA_SET_CONTENTS) +# assert_data_set_or_file_exists(hosts, backup_name) +# create_sequential_data_set_with_contents( +# hosts, data_set_name, DATA_SET_CONTENTS +# ) +# results = hosts.all.zos_backup_restore( +# operation="backup", +# data_sets=dict(include=data_set_name), +# backup_name=backup_name, +# overwrite=overwrite, +# ) +# if overwrite: +# assert_module_did_not_fail(results) +# for result in results.contacted.values(): +# assert result.get("backup_name") == backup_name, \ +# f"Backup name '{backup_name}' not found in output" +# else: +# assert_module_failed(results) +# assert_data_set_or_file_exists(hosts, backup_name) +# finally: +# delete_data_set_or_file(hosts, data_set_name) +# delete_data_set_or_file(hosts, backup_name) +# delete_remnants(hosts) @pytest.mark.parametrize( From 01bf4076ff11416aba779488155b657aba38fea5 Mon Sep 17 00:00:00 2001 From: Rohitash Goyal Date: Fri, 15 Aug 2025 02:23:15 +0530 Subject: [PATCH 14/73] Changes already merged to staging1.16 for 761 and 2123 (#2215) * 761 changes already merged to staging1.16 * chnages merged to statging v1.16 as part of 2213 PR * resolving sanity issue in new file creation * Updated zos_job_output * Updated wrong line * Updated tests --------- Co-authored-by: Fernando Flores --- .../2207-SYSIN-support-zos_job_output.yml | 3 + ...213-test-case-conditional-failure-2-19.yml | 16 ++++++ plugins/module_utils/job.py | 14 +++-- plugins/modules/zos_job_output.py | 
17 +++++- .../functional/modules/test_zos_copy_func.py | 2 +- .../modules/test_zos_job_output_func.py | 55 ++++++++++++++++++- .../modules/test_zos_job_submit_func.py | 2 +- .../modules/test_zos_script_func.py | 2 +- .../modules/test_zos_unarchive_func.py | 2 +- 9 files changed, 102 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/2207-SYSIN-support-zos_job_output.yml create mode 100644 changelogs/fragments/2213-test-case-conditional-failure-2-19.yml diff --git a/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml b/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml new file mode 100644 index 0000000000..230b112eae --- /dev/null +++ b/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_job_output - Adds support to query SYSIN DDs from a job with new option input. + (https://github.com/ansible-collections/ibm_zos_core/pull/2207) \ No newline at end of file diff --git a/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml b/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml new file mode 100644 index 0000000000..72bf5c59ad --- /dev/null +++ b/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml @@ -0,0 +1,16 @@ +trivial: + - test_zos_copy_func.py - modified test case `test_job_script_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + + - test_zos_job_submit_func.py - modified test case `test_job_submit_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + + - test_zos_script_func.py - modified test case `test_job_script_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + + - test_zos_unarchive_func.py - modified test case `test_zos_unarchive_async` + to resolve porting issues to ansible 2.19. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2213). \ No newline at end of file diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index d3f2fce065..80f7add084 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -58,7 +58,7 @@ ]) -def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): +def job_output(job_id=None, owner=None, job_name=None, dd_name=None, sysin=False, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. Keyword Parameters @@ -71,6 +71,8 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru The job name search for (default: {None}). dd_name : str The data definition to retrieve (default: {None}). + sysin : bool + The input DD to retrieve SYSIN value (default: {False}). dd_scan : bool Whether or not to pull information from the dd's for this job {default: {True}}. duration : int @@ -112,6 +114,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru job_name=job_name, dd_name=dd_name, duration=duration, + sysin=sysin, dd_scan=dd_scan, timeout=timeout, start_time=start_time @@ -128,6 +131,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru owner=owner, job_name=job_name, dd_name=dd_name, + sysin=sysin, dd_scan=dd_scan, duration=duration, timeout=timeout, @@ -300,7 +304,7 @@ def _parse_steps(job_str): return stp -def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): +def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, sysin=False, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get job status. Parameters @@ -313,6 +317,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T The job name search for (default: {None}). 
dd_name : str The data definition to retrieve (default: {None}). + sysin : bool + The input DD SYSIN (default: {False}). dd_scan : bool Whether or not to pull information from the dd's for this job {default: {True}}. duration : int @@ -418,7 +424,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T list_of_dds = [] try: - list_of_dds = jobs.list_dds(entry.job_id) + list_of_dds = jobs.list_dds(entry.job_id, sysin=sysin) except exceptions.DDQueryException: is_dd_query_exception = True @@ -437,7 +443,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T try: # Note, in the event of an exception, eg job has TYPRUN=HOLD # list_of_dds will still be populated with valuable content - list_of_dds = jobs.list_dds(entry.job_id) + list_of_dds = jobs.list_dds(entry.job_id, sysin=sysin) is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False except exceptions.DDQueryException: diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 70451c7cae..13edfbb310 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -33,6 +33,7 @@ like "*". - If there is no dd_name, or if dd_name="?", output of all the dds under the given job will be displayed. + - If SYSIN DDs are needed, C(input) should be set to C(true). version_added: "1.0.0" author: - "Jack Ho (@jacklotusho)" @@ -62,6 +63,12 @@ type: str required: false aliases: [ ddname ] + input: + description: + - Whether to include SYSIN DDs as part of the output. + type: bool + default: false + required: false attributes: action: @@ -91,6 +98,11 @@ job_name: "*" owner: "IBMUSER" dd_name: "?" 
+ +- name: Query a job's output including SYSIN DDs + zos_job_output: + job_id: "JOB00548" + input: true """ RETURN = r""" @@ -496,6 +508,7 @@ def run_module(): job_id=dict(type="str", required=False), job_name=dict(type="str", required=False), owner=dict(type="str", required=False), + input=dict(type="bool", required=False, default=False), dd_name=dict(type="str", required=False, aliases=['ddname']), ) @@ -505,6 +518,7 @@ def run_module(): job_id=dict(type="job_identifier", required=False), job_name=dict(type="job_identifier", required=False), owner=dict(type="str", required=False), + input=dict(type="bool", required=False, default=False), dd_name=dict(type="str", required=False, aliases=['ddname']), ) @@ -524,6 +538,7 @@ def run_module(): job_id = module.params.get("job_id") job_name = module.params.get("job_name") owner = module.params.get("owner") + sysin = module.params.get("input") dd_name = module.params.get("dd_name") if not job_id and not job_name and not owner: @@ -531,7 +546,7 @@ def run_module(): try: results = {} - results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=dd_name) + results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=dd_name, sysin=sysin) for job in results["jobs"]: if "job_not_found" in job: results["changed"] = False diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 3bfb1b14d6..dfa45fa6a8 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -257,7 +257,7 @@ async_status: jid: "{{{{ copy_output.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 10 delay: 30 diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 64caf0f3ec..7ea1a06d5f 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ 
b/tests/functional/modules/test_zos_job_output_func.py @@ -16,6 +16,7 @@ __metaclass__ = type from shellescape import quote +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name JCL_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, @@ -30,6 +31,19 @@ // """ +JCL_FILE_CONTENTS_SYSIN = """//SYSINS JOB (T043JM,JM00,1,0,0,0),'SYSINS - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=OMVSADM +//STEP1 EXEC PGM=BPXBATCH,PARM='SH sleep 1' +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//LISTCAT EXEC PGM=IDCAMS,REGION=4M +//SYSPRINT DD SYSOUT=* +//SYSIN DD * + LISTCAT ENTRIES('{0}') ALL +/* +// +""" + TEMP_PATH = "/tmp/jcl" def test_zos_job_output_no_job_id(ansible_zos_module): @@ -249,7 +263,7 @@ def test_zos_job_output_job_exists(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") -def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): +def test_zos_job_output_job_exists_with_filtered_dd_name(ansible_zos_module): try: hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") @@ -261,7 +275,7 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" - results = hosts.all.zos_job_output(job_name="HELLO", ddname=dd_name) + results = hosts.all.zos_job_output(job_name="HELLO", dd_name=dd_name) for result in results.contacted.values(): assert result.get("changed") is True assert result.get("msg", False) is False @@ -310,6 +324,43 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") +def test_zos_job_output_job_exists_with_sysin(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + data_set_name = get_tmp_ds_name() + result = hosts.all.zos_data_set( + name=data_set_name, + type="seq", + state="present" + ) + print(f"job_submit_result: {result.contacted.values()}") + 
hosts.all.shell( + cmd=f"echo {quote(JCL_FILE_CONTENTS_SYSIN.format(data_set_name))} > {TEMP_PATH}/SYSIN" + ) + result = hosts.all.zos_job_submit( + src=f"{TEMP_PATH}/SYSIN", remote_src=True, volume=None + ) + print(f"job_submit_result: {result.contacted.values()}") + hosts.all.file(path=TEMP_PATH, state="absent") + sysin = True + results = hosts.all.zos_job_output(job_name="SYSINS", input=sysin) + for result in results.contacted.values(): + print(f"job_output_result: {result}") + assert result.get("changed") is True + for job in result.get("jobs"): + assert len(job.get("dds")) >= 1 + sysin_found = False + for ddname_entry in job.get("dds"): + if ddname_entry.get("dd_name") == "SYSIN": + sysin_found = True + break + assert sysin_found + finally: + hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") + + def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index efa4140794..ef2e060c12 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -425,7 +425,7 @@ async_status: jid: "{{{{ job_task.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 20 delay: 5 """ diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py index 2afa759133..3a73590209 100644 --- a/tests/functional/modules/test_zos_script_func.py +++ b/tests/functional/modules/test_zos_script_func.py @@ -87,7 +87,7 @@ async_status: jid: "{{{{ job_task.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 20 delay: 5 """ diff --git 
a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 22238326a1..5b4aff3dfd 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -72,7 +72,7 @@ async_status: jid: "{{{{ job_task.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 20 delay: 5 """ From 8666829268abdecb868f3832b715fd903d0f3110 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 15 Aug 2025 17:03:09 -0600 Subject: [PATCH 15/73] [Enhancement][zos_fetch] Update module interface return values (#2231) * Updated zos_fetch with new interface * Updated to fix vsam errors * Updated failing scenario * Updated changelogs * Added new changes * Updated zos_fetch * Fixed sanity issues --- .../2231-zos_fetch-interface-update.yml | 3 + plugins/action/zos_fetch.py | 65 +++++++++++++------ plugins/modules/zos_fetch.py | 60 ++++++++++------- .../functional/modules/test_zos_fetch_func.py | 58 +++++++++++++++-- 4 files changed, 135 insertions(+), 51 deletions(-) create mode 100644 changelogs/fragments/2231-zos_fetch-interface-update.yml diff --git a/changelogs/fragments/2231-zos_fetch-interface-update.yml b/changelogs/fragments/2231-zos_fetch-interface-update.yml new file mode 100644 index 0000000000..4f603783f7 --- /dev/null +++ b/changelogs/fragments/2231-zos_fetch-interface-update.yml @@ -0,0 +1,3 @@ +breaking_changes: + - zos_fetch - Return value ``file`` is replaced by ``src``. Return value ``note`` is deprecated, the messages thrown in ``note`` are now returned in ``msg``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2231). 
diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 91707f61f7..2da09c0d1d 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -1,4 +1,4 @@ -# Copyright (c) IBM Corporation 2019, 2024 +# Copyright (c) IBM Corporation 2019, 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -57,7 +57,7 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False): updated_result = dict((k, v) for k, v in result.items()) updated_result.update( { - "file": src, + "src": src, "dest": dest, "data_set_type": data_set_types[ds_type], "is_binary": is_binary, @@ -121,6 +121,7 @@ def run(self, tmp=None, task_vars=None): dest = self._task.args.get('dest') encoding = self._task.args.get('encoding', None) flat = _process_boolean(self._task.args.get('flat'), default=False) + fail_on_missing = _process_boolean(self._task.args.get('fail_on_missing'), default=True) is_binary = _process_boolean(self._task.args.get('is_binary')) ignore_sftp_stderr = _process_boolean( self._task.args.get("ignore_sftp_stderr"), default=True @@ -186,29 +187,55 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars ) ds_type = fetch_res.get("ds_type") - src = fetch_res.get("file") + src = fetch_res.get("src") remote_path = fetch_res.get("remote_path") - - if fetch_res.get("msg"): - result["msg"] = fetch_res.get("msg") + # Create a dictionary that is a schema for the return values + result = dict( + src="", + dest="", + is_binary=False, + checksum="", + changed=False, + data_set_type="", + msg="", + stdout="", + stderr="", + stdout_lines=[], + stderr_lines=[], + rc=0, + encoding=new_module_args.get("encoding"), + ) + # Populate it with the modules response + result["src"] = fetch_res.get("src") + result["dest"] = fetch_res.get("dest") + result["is_binary"] = fetch_res.get("is_binary", False) + result["checksum"] = 
fetch_res.get("checksum") + result["changed"] = fetch_res.get("changed", False) + result["data_set_type"] = fetch_res.get("data_set_type") + result["msg"] = fetch_res.get("msg") + result["stdout"] = fetch_res.get("stdout") + result["stderr"] = fetch_res.get("stderr") + result["stdout_lines"] = fetch_res.get("stdout_lines") + result["stderr_lines"] = fetch_res.get("stderr_lines") + result["rc"] = fetch_res.get("rc", 0) + result["encoding"] = fetch_res.get("encoding") + + if fetch_res.get("failed", False): result["stdout"] = fetch_res.get("stdout") or fetch_res.get( "module_stdout" ) result["stderr"] = fetch_res.get("stderr") or fetch_res.get( "module_stderr" ) - result["stdout_lines"] = fetch_res.get("stdout_lines") - result["stderr_lines"] = fetch_res.get("stderr_lines") - result["rc"] = fetch_res.get("rc") result["failed"] = True return result - - elif fetch_res.get("note"): - result["note"] = fetch_res.get("note") + if "No data was fetched." in result["msg"]: + if fail_on_missing: + result["failed"] = True return result except Exception as err: - result["msg"] = "Failure during module execution" + result["msg"] = f"Failure during module execution {msg}" result["stderr"] = str(err) result["stderr_lines"] = str(err).splitlines() result["failed"] = True @@ -229,7 +256,6 @@ def run(self, tmp=None, task_vars=None): # For instance: If src is: USER.TEST.PROCLIB(DATA) # # and dest is: /tmp/, then updated dest would be /tmp/DATA # # ********************************************************** # - if os.path.sep not in self._connection._shell.join_path("a", ""): src = self._connection._shell._unquote(src) source_local = src.replace("\\", "/") @@ -290,15 +316,11 @@ def run(self, tmp=None, task_vars=None): try: if ds_type in SUPPORTED_DS_TYPES: if ds_type == "PO" and os.path.isfile(dest) and not fetch_member: - result[ - "msg" - ] = "Destination must be a directory to fetch a partitioned data set" + result["msg"] = "Destination must be a directory to fetch a partitioned data 
set" result["failed"] = True return result if ds_type == "GDG" and os.path.isfile(dest): - result[ - "msg" - ] = "Destination must be a directory to fetch a generation data group" + result["msg"] = "Destination must be a directory to fetch a generation data group" result["failed"] = True return result @@ -309,7 +331,8 @@ def run(self, tmp=None, task_vars=None): ignore_stderr=ignore_sftp_stderr, ) if fetch_content.get("msg"): - return fetch_content + result.update(fetch_content) + return result if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary: new_checksum = _get_file_checksum(dest) diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 62004698aa..cb270ab311 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -240,8 +240,10 @@ """ RETURN = r""" -file: - description: The source file path or data set on the remote machine. +src: + description: + - The source file path or data set on the remote machine. + - If the source is not found, then src will be empty. returned: success type: str sample: SOME.DATA.SET @@ -266,14 +268,9 @@ returned: success type: str sample: PDSE -note: - description: Notice of module failure when C(fail_on_missing) is false. - returned: failure and fail_on_missing=false - type: str - sample: The data set USER.PROCLIB does not exist. No data was fetched. msg: - description: Message returned on failure. - returned: failure + description: Any important messages from the module. + returned: always type: str sample: The source 'TEST.DATA.SET' does not exist or is uncataloged. 
stdout: @@ -921,8 +918,23 @@ def run_module(): # ********************************************************** # # Check for data set existence and determine its type # # ********************************************************** # - - res_args = dict() + encoding_dict = {"from": encoding.get("from"), "to": encoding.get("to")} + result = dict( + src=src, + dest="", + is_binary=is_binary, + checksum="", + changed=False, + data_set_type="", + remote_path="", + msg="", + stdout="", + stderr="", + stdout_lines=[], + stderr_lines=[], + rc=0, + encoding=encoding_dict, + ) src_data_set = None ds_type = None @@ -963,7 +975,7 @@ def run_module(): ) else: module.exit_json( - note=("Source '{0}' was not found. No data was fetched.".format(src)) + msg=("Source '{0}' was not found. No data was fetched.".format(src)) ) if "/" in src: @@ -992,7 +1004,7 @@ def run_module(): is_binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path # ********************************************************** # # Fetch a partitioned data set or one of its members # @@ -1005,9 +1017,9 @@ def run_module(): is_binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path else: - res_args["remote_path"] = fetch_handler._fetch_pdse( + result["remote_path"] = fetch_handler._fetch_pdse( src_data_set.name, is_binary, encoding=encoding @@ -1027,7 +1039,7 @@ def run_module(): is_binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path # ********************************************************** # # Fetch a VSAM data set # @@ -1039,32 +1051,32 @@ def run_module(): is_binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path # ********************************************************** # # Fetch a GDG # # ********************************************************** # elif ds_type == "GDG": - res_args["remote_path"] = fetch_handler._fetch_gdg( + 
result["remote_path"] = fetch_handler._fetch_gdg( src_data_set.name, is_binary, encoding=encoding ) if ds_type == "USS": - res_args["file"] = src + result["src"] = src else: - res_args["file"] = src_data_set.name + result["src"] = src_data_set.name # Removing the HLQ since the user is probably not expecting it. The module # hasn't returned it ever since it was originally written. Changes made to # add GDG/GDS support started leaving the HLQ behind in the file name. if hlq: - res_args["file"] = res_args["file"].replace(f"{hlq}.", "") + result["src"] = result["src"].replace(f"{hlq}.", "") - res_args["ds_type"] = ds_type - module.exit_json(**res_args) + result["ds_type"] = ds_type + module.exit_json(**result) class ZOSFetchError(Exception): @@ -1094,7 +1106,7 @@ def __init__(self, msg, rc="", stdout="", stderr="", stdout_lines="", stderr_lin stdout_lines=stdout_lines, stderr_lines=stderr_lines, ) - super().__init__(self.msg) + super().__init__(msg) def main(): diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index 222bc5888e..e6673c136d 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -182,6 +182,13 @@ def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): assert result.get("data_set_type") == "USS" assert result.get("module_stderr") is None assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert "stdout" in result.keys() + assert "stdout_lines" in result.keys() + assert "stderr" in result.keys() + assert "stderr_lines" in result.keys() + assert "rc" is not None + assert isinstance(result.get("encoding"), dict) finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -191,8 +198,9 @@ def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module): with open("/tmp/profile", "w",encoding="utf-8") as file: file.close() hosts = ansible_zos_module + src = "/etc/profile" params = { - 
"src":"/etc/profile", + "src": src, "dest":"/tmp/", "flat":True } @@ -206,14 +214,17 @@ def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module): assert result.get("checksum") != local_checksum assert result.get("module_stderr") is None assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: os.remove(dest_path) def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module + src = "/etc/profile" params = { - "src":"/etc/profile", + "src": src, "dest": "/tmp/", "flat":True } @@ -227,6 +238,8 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): assert result.get("changed") is False assert result.get("checksum") == local_checksum assert result.get("module_stderr") is None + assert "msg" in result.keys() + assert result.get("src") is not None finally: os.remove(dest_path) @@ -256,6 +269,8 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -280,6 +295,8 @@ def test_fetch_sequential_data_set_variable_block(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -308,6 +325,8 @@ def test_fetch_partitioned_data_set(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) assert os.path.isdir(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): @@ -329,7 +348,7 @@ def 
test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): cmd=f"echo {quote(KSDS_CREATE_JCL.format(volume_1, test_vsam))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) hosts.all.shell(cmd=f"echo \"{TEST_DATA}\\c\" > {uss_file}") hosts.all.zos_encode( @@ -357,6 +376,8 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): file = open(dest_path, 'r',encoding="utf-8") read_file = file.read() assert read_file == TEST_DATA + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): @@ -384,6 +405,8 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=src_ds, state="absent") if os.path.exists(dest_path): @@ -414,6 +437,8 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): assert result.get("is_binary") is True assert os.path.exists(dest_path) assert os.path.isfile(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): @@ -446,6 +471,8 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("is_binary") is True assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -475,6 +502,8 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): assert result.get("is_binary") is True assert os.path.exists(dest_path) assert os.path.isdir(dest_path) + assert "msg" in 
result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): @@ -500,6 +529,8 @@ def test_fetch_sequential_data_set_empty(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) assert os.stat(dest_path).st_size == 0 + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -558,6 +589,8 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): assert result.get("module_stderr") is None assert os.path.exists(dest_path) assert os.stat(dest_path).st_size == 0 + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -576,7 +609,7 @@ def test_fetch_missing_uss_file_does_not_fail(ansible_zos_module): results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is False - assert "note" in result.keys() + assert "msg" in result.keys() assert result.get("module_stderr") is None except Exception: raise @@ -610,7 +643,7 @@ def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is False - assert "note" in result.keys() + assert "msg" in result.keys() assert result.get("module_stderr") is None assert not os.path.exists("/tmp/FETCH.TEST.DATA.SET") except Exception: @@ -679,6 +712,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): assert result.get("changed") is True assert result.get("module_stderr") is None assert checksum(dest_path, hash_func=sha256) != local_checksum + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -718,6 +752,7 @@ def 
test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) assert result.get("changed") is True assert result.get("module_stderr") is None assert os.path.getmtime(dest_path) != prev_timestamp + assert result.get("src") is not None finally: if os.path.exists(dest_path): shutil.rmtree(dest_path) @@ -736,6 +771,7 @@ def test_fetch_uss_file_insufficient_write_permission_fails(ansible_zos_module): results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert "msg" in result.keys() + assert result.get("src") is not None dest_path.close() @@ -776,11 +812,12 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -807,6 +844,8 @@ def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None + assert "msg" in result.keys() + assert result.get("src") is not None assert os.path.exists(dest_path) finally: if os.path.exists(dest_path): @@ -836,6 +875,8 @@ def test_fetch_sequential_data_set_with_special_chars(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -862,6 +903,8 @@ def test_fetch_gds_from_gdg(ansible_zos_module, generation): assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") 
is None + assert "msg" in result.keys() + assert result.get("src") is not None # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. dest_path = result.get("dest", "") @@ -894,6 +937,7 @@ def test_error_fetch_inexistent_gds(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("failed") is True + assert "msg" in result.keys() assert "does not exist" in result.get("msg", "") finally: @@ -920,6 +964,7 @@ def test_fetch_gdg(ansible_zos_module): assert result.get("changed") is True assert result.get("data_set_type") == "Generation Data Group" assert result.get("module_stderr") is None + assert "msg" in result.keys() # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. dest_path = result.get("dest", "") @@ -981,6 +1026,7 @@ def test_fetch_uss_file_relative_path_not_present_on_local_machine(ansible_zos_m assert result.get("module_stderr") is None assert dest == result.get("dest") dest = result.get("dest") + assert "msg" in result.keys() finally: if os.path.exists(dest): From ae042ba0eba78f3f3694df7d99cbe0c049985be2 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 15 Aug 2025 17:04:44 -0600 Subject: [PATCH 16/73] [Enabler][zos_zfs_resize] Updated zos_zfs_resize test that failed on ansible 2.18 (#2240) * Updated test * Added changelogs * Update 2240-fix2.18-zos_zfs_resize.yml --- .../fragments/2240-fix2.18-zos_zfs_resize.yml | 4 ++++ .../modules/test_zos_zfs_resize_func.py | 16 ++++++++-------- 2 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml diff --git a/changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml b/changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml new file mode 100644 index 0000000000..baabe1ee97 --- /dev/null +++ b/changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml @@ -0,0 +1,4 @@ +trivial: + - test_zos_zfs_resize_func.py - modified test case 
`test_grow_n_shrink_operations_trace_ds_not_created` and `test_grow_n_shrink_operations_trace_ds` + to resolve issues when testing ansible 2.18. + (https://github.com/ansible-collections/ibm_zos_core/pull/2240). diff --git a/tests/functional/modules/test_zos_zfs_resize_func.py b/tests/functional/modules/test_zos_zfs_resize_func.py index 94aceb3000..01a104b332 100644 --- a/tests/functional/modules/test_zos_zfs_resize_func.py +++ b/tests/functional/modules/test_zos_zfs_resize_func.py @@ -655,10 +655,10 @@ def test_grow_n_shrink_operations_trace_ds(ansible_zos_module, trace_destination assert result.get('new_free_space') >= result.get('old_free_space') assert result.get('space_type') == "k" assert "Printing contents of table at address" in result.get("stdout") - cmd = "dcat \"{0}\" ".format(trace_destination_ds) + cmd = "dcat \"{0}\" | wc -l".format(trace_destination_ds) output_of_trace_file = hosts.all.shell(cmd=cmd) for out in output_of_trace_file.contacted.values(): - assert out.get("stdout") is not None + assert int(out.get("stdout")) != 0 assert result.get('stderr') == "" assert result.get('stderr_lines') == [] @@ -687,10 +687,10 @@ def test_grow_n_shrink_operations_trace_ds(ansible_zos_module, trace_destination assert result.get('new_free_space') <= result.get('old_free_space') assert result.get('space_type') == "k" assert "print of in-memory trace table has completed" in result.get('stdout') - cmd = "dcat \"{0}\" ".format(trace_destination_ds_s) + cmd = "dcat \"{0}\" | wc -l".format(trace_destination_ds_s) output_of_trace_file = hosts.all.shell(cmd=cmd) for out in output_of_trace_file.contacted.values(): - assert out.get("stdout") is not None + assert int(out.get("stdout")) != 0 assert result.get('stderr') == "" assert result.get('stderr_lines') == [] @@ -736,10 +736,10 @@ def test_grow_n_shrink_operations_trace_ds_not_created(ansible_zos_module, trace assert result.get('new_free_space') >= result.get('old_free_space') assert result.get('space_type') == "k" 
assert "Printing contents of table at address" in result.get("stdout") - cmd = "dcat \"{0}\" ".format(trace_destination_ds) + cmd = "dcat \"{0}\" | wc -l".format(trace_destination_ds) output_of_trace_file = hosts.all.shell(cmd=cmd) for out in output_of_trace_file.contacted.values(): - assert out.get("stdout") is not None + assert int(out.get("stdout")) != 0 assert result.get('stderr') == "" assert result.get('stderr_lines') == [] @@ -762,10 +762,10 @@ def test_grow_n_shrink_operations_trace_ds_not_created(ansible_zos_module, trace assert result.get('new_free_space') <= result.get('old_free_space') assert result.get('space_type') == "k" assert "print of in-memory trace table has completed" in result.get('stdout') - cmd = "dcat \"{0}\" ".format(trace_destination_ds_s) + cmd = "dcat \"{0}\" | wc -l".format(trace_destination_ds_s) output_of_trace_file = hosts.all.shell(cmd=cmd) for out in output_of_trace_file.contacted.values(): - assert out.get("stdout") is not None + assert int(out.get("stdout")) != 0 assert result.get('stderr') == "" assert result.get('stderr_lines') == [] From 08a27e35dd67d4d2b6536670fbba59f58e8c7604 Mon Sep 17 00:00:00 2001 From: Mayank Mani Date: Sat, 16 Aug 2025 04:36:01 +0530 Subject: [PATCH 17/73] [Enhancement][zos_data_set]Support for NOSCRATCH option when deleting datasets (#2210) * add nonscratch parameter in main module * added support for nonscratch in module_utils * added test case * added fragments * FIXED * changed in fragments * Updated changelogs * reviewed comments * fixed * removed print statement * removed extra validation * [Enabler] [2053] [zos_data_set] Add_support_for_noscratch_option (#2202) * add nonscratch parameter in main module * added support for nonscratch in module_utils * added test case * added fragments * FIXED * changed in fragments * Updated changelogs * reviewed comments * fixed * removed print statement * removed extra validation --------- Co-authored-by: Fernando Flores * [Enhancement] SYSIN DDs support for 
zos_job_output (#2207) * adding sysin support * Adding the test case * changelog addition * sanity issue resolving * removing redundant files * review comments incorporation * [Tests] [Ansible 2.19] Fix conditionals failures when executing playbooks in test cases (#2213) * editing and testing modules * changelog addition * Update and rename 2202-zos_data_set-Support-noscratch-options.yml to 2210-zos_data_set-Support-noscratch-options.yml as this branch was already merged in staging now creating new PR to merge in dev so i chnage the pull request no in fragments * [zos_job_submit] Fix TYPRUN=COPY, TYPRUN=HOLD and TYPRUN=JCLHOLD handling (#2229) * Fix TYPRUN handling * Add changelog fragment * Remove commented code * Update module documentation * Updated changelogs * Updated changelogs * Update zos_job_submit calls * Corrected some more zos_job_submit calls * Updated test * Updated changelogs * Updated changelogs * Added comment in tests * Updated wrong zos_copy call --------- Co-authored-by: Fernando Flores Co-authored-by: Rohitash Goyal Co-authored-by: Alex Moreno --- .../2207-SYSIN-support-zos_job_output.yml | 3 - ...zos_data_set-Support-noscratch-options.yml | 10 ++ ...15-test-case-conditional-failure-2-19.yml} | 11 +- docs/source/modules/zos_job_submit.rst | 4 +- plugins/action/zos_job_submit.py | 2 +- plugins/module_utils/data_set.py | 26 +-- plugins/modules/zos_data_set.py | 51 +++++- plugins/modules/zos_job_submit.py | 71 ++++---- .../modules/test_zos_data_set_func.py | 160 +++++++++++++++++- .../modules/test_zos_job_submit_func.py | 36 ++-- 10 files changed, 291 insertions(+), 83 deletions(-) delete mode 100644 changelogs/fragments/2207-SYSIN-support-zos_job_output.yml create mode 100644 changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml rename changelogs/fragments/{2213-test-case-conditional-failure-2-19.yml => 2215-test-case-conditional-failure-2-19.yml} (79%) diff --git a/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml 
b/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml deleted file mode 100644 index 230b112eae..0000000000 --- a/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml +++ /dev/null @@ -1,3 +0,0 @@ -minor_changes: - - zos_job_output - Adds support to query SYSIN DDs from a job with new option input. - (https://github.com/ansible-collections/ibm_zos_core/pull/2207) \ No newline at end of file diff --git a/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml b/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml new file mode 100644 index 0000000000..aa9ff7330d --- /dev/null +++ b/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_data_set - Adds `noscratch` option to allow uncataloging + a data set without deleting it from the volume's VTOC. + (https://github.com/ansible-collections/ibm_zos_core/pull/2210) +trivial: + - data_set - Internal updates to support the noscratch option. + https://github.com/ansible-collections/ibm_zos_core/pull/2210) + - test_zos_data_set_func - added test case to verify the `noscratch` option + functionality in zos_data_set module. + (https://github.com/ansible-collections/ibm_zos_core/pull/2210). diff --git a/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml b/changelogs/fragments/2215-test-case-conditional-failure-2-19.yml similarity index 79% rename from changelogs/fragments/2213-test-case-conditional-failure-2-19.yml rename to changelogs/fragments/2215-test-case-conditional-failure-2-19.yml index 72bf5c59ad..64d8bea6d2 100644 --- a/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml +++ b/changelogs/fragments/2215-test-case-conditional-failure-2-19.yml @@ -1,16 +1,19 @@ +minor_changes: + - zos_job_output - Adds support to query SYSIN DDs from a job with new option input. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2215) trivial: - test_zos_copy_func.py - modified test case `test_job_script_async` to resolve porting issues to ansible 2.19. - (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). - test_zos_job_submit_func.py - modified test case `test_job_submit_async` to resolve porting issues to ansible 2.19. - (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). - test_zos_script_func.py - modified test case `test_job_script_async` to resolve porting issues to ansible 2.19. - (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). - test_zos_unarchive_func.py - modified test case `test_zos_unarchive_async` to resolve porting issues to ansible 2.19. - (https://github.com/ansible-collections/ibm_zos_core/pull/2213). \ No newline at end of file + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). \ No newline at end of file diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 0468e85df1..6d31b6abd0 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -765,7 +765,9 @@ jobs Job status `TYPRUN=SCAN` indicates that the job had the TYPRUN parameter with SCAN option. - Job status `NOEXEC` indicates that the job had the TYPRUN parameter with COPY option. + Job status `TYPRUN=COPY` indicates that the job had the TYPRUN parameter with COPY option. + + Job status `HOLD` indicates that the job had the TYPRUN parameter with either the HOLD or JCLHOLD options. Jobs where status can not be determined will result in None (NULL). 
diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 5456fb3ec7..944f5af212 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -150,7 +150,7 @@ def run(self, tmp=None, task_vars=None): src=source_full, dest=dest_file, mode="0666", - force=True, + replace=True, encoding=module_args.get('encoding'), remote_src=False, ) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 3c055d0ff1..778fa7a290 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -241,7 +241,7 @@ def ensure_present( return True @staticmethod - def ensure_absent(name, volumes=None, tmphlq=None): + def ensure_absent(name, volumes=None, tmphlq=None, noscratch=False): """Deletes provided data set if it exists. Parameters @@ -252,13 +252,15 @@ def ensure_absent(name, volumes=None, tmphlq=None): The volumes the data set may reside on. tmphlq : str High Level Qualifier for temporary datasets. + noscratch : bool + If True, the data set is uncataloged but not physically removed from the volume. Returns ------- bool Indicates if changes were made. """ - changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq) + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq, noscratch=noscratch) return changed # ? should we do additional check to ensure member was actually created? @@ -1003,7 +1005,7 @@ def attempt_catalog_if_necessary(name, volumes, tmphlq=None): return present, changed @staticmethod - def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): + def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None, noscratch=False): """Attempts to catalog a data set if not already cataloged, then deletes the data set. 
This is helpful when a data set currently cataloged is not the data @@ -1019,6 +1021,8 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): The volumes the data set may reside on. tmphlq : str High Level Qualifier for temporary datasets. + noscratch : bool + If True, the data set is uncataloged but not physically removed from the volume. Returns ------- @@ -1039,7 +1043,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) changed = True present = False else: @@ -1074,7 +1078,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): if present: try: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) except DatasetDeleteError: try: DataSet.uncatalog(name, tmphlq=tmphlq) @@ -1101,14 +1105,14 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) changed = True present = False else: present = DataSet.data_set_cataloged(name, None, tmphlq=tmphlq) if present: try: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) changed = True present = False except DatasetDeleteError: @@ -1414,7 +1418,7 @@ def create( return changed @staticmethod - def delete(name): + def delete(name, noscratch=False): """A wrapper around zoautil_py datasets.delete() to raise exceptions on failure. @@ -1428,7 +1432,7 @@ def delete(name): DatasetDeleteError When data set deletion fails. 
""" - rc = datasets.delete(name) + rc = datasets.delete(name, noscratch=noscratch) if rc > 0: raise DatasetDeleteError(name, rc) @@ -2721,7 +2725,7 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False): self.set_state("present") return rc - def ensure_absent(self, tmp_hlq=None): + def ensure_absent(self, tmp_hlq=None, noscratch=False): """Removes the data set. Parameters @@ -2734,7 +2738,7 @@ def ensure_absent(self, tmp_hlq=None): int Indicates if changes were made. """ - rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq) + rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq, noscratch=noscratch) if rc == 0: self.set_state("absent") return rc diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index d03bbb1268..25d2ef073c 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -298,6 +298,15 @@ type: bool required: false default: false + noscratch: + description: + - "When C(state=absent), specifies whether to keep the data set's entry in the VTOC." + - If C(noscratch=True), the data set is uncataloged but not physically removed from the volume. + The Data Set Control Block is not removed from the VTOC. + - This is the equivalent of using C(NOSCRATCH) in an C(IDCAMS DELETE) command. + type: bool + required: false + default: false volumes: description: - > @@ -575,6 +584,15 @@ type: bool required: false default: false + noscratch: + description: + - "When C(state=absent), specifies whether to keep the data set's entry in the VTOC." + - If C(noscratch=True), the data set is uncataloged but not physically removed from the volume. + The Data Set Control Block is not removed from the VTOC. + - This is the equivalent of using C(NOSCRATCH) in an C(IDCAMS DELETE) command. + type: bool + required: false + default: false extended: description: - Sets the I(extended) attribute for Generation Data Groups. 
@@ -734,6 +752,12 @@ name: someds.name.here state: absent +- name: Uncatalog a data set but do not remove it from the volume. + zos_data_set: + name: someds.name.here + state: absent + noscratch: true + - name: Delete a data set if it exists. If data set not cataloged, check on volume 222222 for the data set, and then catalog and delete if found. zos_data_set: name: someds.name.here @@ -1404,7 +1428,7 @@ def get_data_set_handler(**params): ) -def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): +def perform_data_set_operations(data_set, state, replace, tmp_hlq, force, noscratch): """Calls functions to perform desired operations on one or more data sets. Returns boolean indicating if changes were made. @@ -1439,7 +1463,7 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): elif state == "absent" and data_set.data_set_type == "gdg": changed = data_set.ensure_absent(force=force) elif state == "absent": - changed = data_set.ensure_absent(tmp_hlq=tmp_hlq) + changed = data_set.ensure_absent(tmp_hlq=tmp_hlq, noscratch=noscratch) elif state == "cataloged": changed = data_set.ensure_cataloged(tmp_hlq=tmp_hlq) elif state == "uncataloged": @@ -1586,6 +1610,11 @@ def parse_and_validate_args(params): required=False, default=False, ), + noscratch=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1676,6 +1705,11 @@ def parse_and_validate_args(params): required=False, default=False, ), + noscratch=dict( + type="bool", + required=False, + default=False, + ), mutually_exclusive=[ ["batch", "name"], # ["batch", "state"], @@ -1788,6 +1822,11 @@ def run_module(): required=False, default=False, ), + noscratch=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1868,6 +1907,11 @@ def run_module(): required=False, default=False ), + noscratch=dict( + type="bool", + required=False, + default=False + ), ) result = dict(changed=False, 
message="", names=[]) @@ -1895,6 +1939,8 @@ def run_module(): module.params["replace"] = None if module.params.get("record_format") is not None: module.params["record_format"] = None + if module.params.get("noscratch") is not None: + module.params["noscratch"] = None elif module.params.get("type") is not None: if module.params.get("type") in DATA_SET_TYPES_VSAM: # For VSAM types set the value to nothing and let the code manage it @@ -1921,6 +1967,7 @@ def run_module(): replace=data_set_params.get("replace"), tmp_hlq=data_set_params.get("tmp_hlq"), force=data_set_params.get("force"), + noscratch=data_set_params.get("noscratch"), ) result["changed"] = result["changed"] or current_changed except Exception as e: diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index d347efcc33..436abae70f 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -268,7 +268,8 @@ - Job status `SYS` indicates a system failure. - Job status `?` indicates status can not be determined. - Job status `TYPRUN=SCAN` indicates that the job had the TYPRUN parameter with SCAN option. - - Job status `NOEXEC` indicates that the job had the TYPRUN parameter with COPY option. + - Job status `TYPRUN=COPY` indicates that the job had the TYPRUN parameter with COPY option. + - Job status `HOLD` indicates that the job had the TYPRUN parameter with either the HOLD or JCLHOLD options. - Jobs where status can not be determined will result in None (NULL). 
type: str sample: AC @@ -1068,7 +1069,9 @@ def run_module(): duration = job_output_txt[0].get("duration") if not None else duration job_output_txt = parsing_job_response(job_output_txt, duration) - if duration >= wait_time: + result["duration"] = duration + job_msg = job_output_txt[0].get("ret_code", {}).get("msg") + if duration >= wait_time and job_msg != "HOLD": result["failed"] = True result["changed"] = False _msg = ("The JCL submitted with job id {0} but appears to be a long " @@ -1113,19 +1116,13 @@ def run_module(): job_ret_code.update({"msg_txt": _msg}) raise Exception(_msg) - if job_ret_code_code is not None and job_ret_code_msg == 'NOEXEC': - job_dd_names = job_output_txt[0].get("dds") - jes_jcl_dd = search_dictionaries("dd_name", "JESJCL", job_dd_names) - # These are the conditions for a job run with TYPRUN=COPY. - if not jes_jcl_dd: - job_ret_code.update({"msg": "TYPRUN=COPY"}) - _msg = ("The job was run with TYPRUN=COPY. " - "This way, the steps are not executed, but the JCL is validated and stored " - "in the JES spool. " - "Please review the job log for further details.") - job_ret_code.update({"msg_txt": _msg}) - - if job_ret_code_code is None or job_ret_code.get("msg") == 'NOEXEC': + if job_ret_code_msg == 'HOLD': + _msg = ("The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD " + "to request special job processing. 
This will result in no completion, " + "no return code, no job steps and changed will be set to false.") + job_ret_code.update({"msg_txt": _msg}) + is_changed = False + elif job_ret_code_code is None and job_ret_code.get("msg") == 'NOEXEC': # If there is no job_ret_code_code (Job return code) it may NOT be an error, # some jobs will never return have an RC, eg Started tasks(which are not supported), # so further analyze the @@ -1135,21 +1132,21 @@ def run_module(): jes_jcl_dd = search_dictionaries("dd_name", "JESJCL", job_dd_names) # Its possible jobs don't have a JESJCL which are active and this would - # cause an index out of range error. + # mean the job had TYPRUN=COPY. if not jes_jcl_dd: - _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." - _msg = ("The job return code was not available in the job log, " - "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg": "TYPRUN=COPY"}) + _msg = ("The job was run with TYPRUN=COPY. " + "This way, the steps are not executed, but the JCL is validated and stored " + "in the JES spool. " + "Please review the job log for further details.") job_ret_code.update({"msg_txt": _msg}) - raise Exception(_msg) - - jes_jcl_dd_content = jes_jcl_dd[0].get("content") - jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) - # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. - special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" - .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) + else: + jes_jcl_dd_content = jes_jcl_dd[0].get("content") + jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) + # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. 
+ special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" + .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) - if job_ret_code_msg == 'NOEXEC': job_ret_code.update({"msg": special_processing_keyword[0]}) job_ret_code.update({"code": None}) job_ret_code.update({"msg_code": None}) @@ -1158,19 +1155,17 @@ def run_module(): "return code or job steps and changed will be false." .format(job_submitted_id, special_processing_keyword[0])}) is_changed = False - else: - # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated - # so check both and provide a proper response. + elif job_ret_code_code is None: + # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated + # so check both and provide a proper response. - if job_ret_code_msg_code is None: - _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." - _msg = ("The job return code was not available in the job log, " - "please review the job log{0}".format(_msg_detail)) - job_ret_code.update({"msg_txt": _msg}) - raise Exception(_msg) + if job_ret_code_msg_code is None: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." 
+ _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) - # raise Exception("The job return code was not available in the job log, " - # "please review the job log and error {0}.".format(job_ret_code_msg)) elif job_ret_code_code != 0 and max_rc is None: _msg = ("The job return code {0} was non-zero in the " "job output, this job has failed.".format(str(job_ret_code_code))) diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index ec188868df..2154248845 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -162,7 +162,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful @@ -221,7 +221,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -267,7 +267,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", 
location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -315,7 +315,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -356,7 +356,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30) # verify data set creation was successful for result in results.contacted.values(): @@ -371,7 +371,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30) # verify data set creation was successful for result in results.contacted.values(): @@ -1161,3 +1161,151 @@ def test_gdg_deletion_when_absent(ansible_zos_module): assert result.get("changed") is False assert result.get("module_stderr") is None assert result.get("failed") is None + +def test_data_set_delete_with_noscratch(ansible_zos_module, volumes_on_systems): + """ + Tests that 'state: 
absent' with 'noscratch: true' correctly uncatalogs + a data set but leaves its physical entry in the VTOC. + """ + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() + hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) + + try: + # Arrange: Create the test data set on the specific volume + hosts.all.zos_data_set( + name=dataset, + type='seq', + state='present', + volumes=[volume], + space_primary=1, + space_type='m' + ) + + # Act: Delete the dataset using the noscratch option + results = hosts.all.zos_data_set( + name=dataset, + state='absent', + noscratch=True + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + # Assert 1: Verify the data set is GONE from the catalog. + # This is the first part of the test, where we check that the data set + results = hosts.all.zos_data_set( + name=dataset, + state='absent', + ) + for result in results.contacted.values(): + assert result.get("changed") is False + # catalog_check = hosts.all.command(f"dls '{dataset}'", failed_when=False) + # for result in catalog_check.contacted.values(): + # # Assert that the command failed (non-zero return code) + # assert result.get("rc") != 0 + # Assert 2: Verify the data set is STILL on the volume's VTOC. + # This is the crucial second half of the test. + # We can do this by trying to delete it again, but specifying the volume. + # If this delete reports "changed: true", it's proof that it found and + # deleted the uncataloged data set from the VTOC. + vtoc_check_and_delete = hosts.all.zos_data_set( + name=dataset, + state='absent', + volumes=volume + ) + for result in vtoc_check_and_delete.contacted.values(): + # This assertion proves the data set existed on the volume's VTOC. + assert result.get("changed") is True + finally: + # Cleanup: Perform a final, full delete from the volume since it's still there. 
+ # We provide the volume to ensure it can be found and deleted. + hosts.all.zos_data_set( + name=dataset, + state='absent', + volumes=[volume] + ) + +def test_batch_uncatalog_with_noscratch_suboption(ansible_zos_module, volumes_on_systems): + """ + Tests that the 'noscratch: true' sub-option works correctly when used inside a + batch list to uncatalog multiple data sets. + """ + hosts = ansible_zos_module + volume = Volume_Handler(volumes_on_systems).get_available_vol() + + # Define two separate data sets for the batch operation + dataset_1 = get_tmp_ds_name() + dataset_2 = get_tmp_ds_name() + + try: + # --- Arrange --- + # Create both data sets in a preliminary batch operation so they exist + setup_results = hosts.all.zos_data_set( + batch=[ + {'name': dataset_1, 'type': 'seq', 'state': 'present', 'volumes': volume}, + {'name': dataset_2, 'type': 'seq', 'state': 'present', 'volumes': volume} + ] + ) + for result in setup_results.contacted.values(): + assert result.get("changed") is True + + # --- Act --- + # Run the main test: a batch uncatalog where both items use noscratch + act_results = hosts.all.zos_data_set( + batch=[ + {'name': dataset_1, 'state': 'absent', 'noscratch': True}, + {'name': dataset_2, 'state': 'absent', 'noscratch': True} + ] + ) + # # Assert on the main action results + for result in act_results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set( + name=dataset_1, + state='absent', + ) + for result in results.contacted.values(): + assert result.get("changed") is False + results = hosts.all.zos_data_set( + name=dataset_2, + state='absent', + ) + for result in results.contacted.values(): + assert result.get("changed") is False + + # # --- Verification Assertions --- + # Assert 2: Verify the data set is STILL on the volume's VTOC. + # This is the crucial second half of the test. + # We can do this by trying to delete it again, but specifying the volume. 
+ # If this delete reports "changed: true", it's proof that it found and + # deleted the uncataloged data set from the VTOC. + + vtoc_check_and_delete = hosts.all.zos_data_set( + name=dataset_1, + state='absent', + volumes=volume + ) + for result in vtoc_check_and_delete.contacted.values(): + # This assertion proves the data set existed on the volume's VTOC + assert result.get("changed") is True + + vtoc_check_and_delete = hosts.all.zos_data_set( + name=dataset_2, + state='absent', + volumes=volume + ) + for result in vtoc_check_and_delete.contacted.values(): + # This assertion proves the data set existed on the volume's VTOC + assert result.get("changed") is True + finally: + # --- Cleanup --- + # Ensure both data sets are fully deleted from the volume's VTOC. + # This is critical because the test's main action leaves them on disk. + hosts.all.zos_data_set( + batch=[ + {'name': dataset_1, 'state': 'absent', 'volumes': [volume]}, + {'name': dataset_2, 'state': 'absent', 'volumes': [volume]} + ] + ) diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index ef2e060c12..788c481a18 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -1585,11 +1585,11 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): "to": "IBM-1047" },) for result in results.contacted.values(): - # With ZOAU 1.3.3 changes now code and return msg_code are 0 and 0000 respectively. - # assert result.get("changed") is False + # With ZOAU 1.3.6 changes now code and return msg_code are both None, now + # being consistent with the rest of the possible TYPRUN cases. # When running a job with TYPRUN=COPY, a copy of the JCL will be kept in the JES spool, so # effectively, the system is changed even though the job didn't run. 
- assert result.get("changed") is False + assert result.get("changed") is True assert result.get("jobs") is not None job = result.get("jobs")[0] @@ -1623,12 +1623,11 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): assert dds.get("content") is not None rc = job.get("ret_code") - assert rc.get("msg") == "NOEXEC" - assert rc.get("code") == None + assert rc.get("msg") == "TYPRUN=COPY" + assert rc.get("code") is None assert rc.get("msg_code") is None - assert rc.get("msg_txt") is not None assert re.search( - r'NOEXEC.', + r'The job was run with TYPRUN=COPY.', repr(rc.get("msg_txt")) ) @@ -1674,12 +1673,12 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): rc = job.get("ret_code") assert re.search( - r'long running job', - repr(rc.get("msg_txt")) + r'The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) ) - assert rc.get("code") is None - assert rc.get("msg") == "HOLD" - assert rc.get("msg_code") is None + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "HOLD" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): @@ -1723,12 +1722,12 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): rc = job.get("ret_code") assert re.search( - r'long running job', - repr(rc.get("msg_txt")) + r'The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD', + repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) ) - assert rc.get("code") is None - assert rc.get("msg") == "HOLD" - assert rc.get("msg_code") is None + assert result.get("jobs")[0].get("ret_code").get("code") is None + assert result.get("jobs")[0].get("ret_code").get("msg") == "HOLD" + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None @pytest.mark.parametrize("generation", ["0", "-1"]) @@ -1981,5 +1980,8 @@ def test_job_submit_async(get_config): 
assert result.returncode == 0 assert "ok=2" in result.stdout assert "changed=2" in result.stdout + # Commenting this assertion as this will cause a failure when a warning is displayed + # e.g. [WARNING]: Using force uses operations that are subject to race conditions and ... + # Which is a normal warning coming from zos_copy operation. assert result.stderr == "" From adf4db5b3da75fe5f3754b09d7e4409400ffdaad Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Tue, 19 Aug 2025 10:14:41 -0600 Subject: [PATCH 18/73] [Enhancement][zos_blockinfile][2.0] Update zos_blockinfile module interface (#2243) * Updated result and added aliases * Added result dict * Updated indentation * Added expected keys in zos_blockinfile * Added return values to the docs * Added changelog * Update 2243-zos_blockinfile-interface-update.yml * Fixed tests --- .../2243-zos_blockinfile-interface-update.yml | 4 ++ plugins/modules/zos_blockinfile.py | 49 +++++++++++++---- .../modules/test_zos_blockinfile_func.py | 54 +++++++++++++++++++ 3 files changed, 96 insertions(+), 11 deletions(-) create mode 100644 changelogs/fragments/2243-zos_blockinfile-interface-update.yml diff --git a/changelogs/fragments/2243-zos_blockinfile-interface-update.yml b/changelogs/fragments/2243-zos_blockinfile-interface-update.yml new file mode 100644 index 0000000000..257804db29 --- /dev/null +++ b/changelogs/fragments/2243-zos_blockinfile-interface-update.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_blockinfile - Adds alias ``after`` for option ``insertafter`` and alias ``before`` for option ``insertbefore``. + Adds new return values ``stdout_lines`` and ``stderr_lines`` that returns any outstanding stdout or stderr message split in lines. + (https://github.com/ansible-collections/ibm_zos_core/pull/2243). 
diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 86e65958a2..06b2f59542 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -81,6 +81,7 @@ - Choices are EOF or '*regex*'. - Default is EOF. required: false + aliases: ['after'] type: str insertbefore: description: @@ -92,6 +93,7 @@ at the end of the file. - Choices are BOF or '*regex*'. required: false + aliases: ['before'] type: str marker_begin: description: @@ -346,6 +348,14 @@ returned: failure type: str sample: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines: + description: List of strings containing individual lines from stdout. + returned: failure + type: list +stderr_lines: + description: List of strings containing individual lines from stderr. + returned: failure + type: list rc: description: The return code from ZOAU dmod when json.loads() fails to parse the result from dmod returned: failure @@ -511,10 +521,12 @@ def main(): aliases=['content'] ), insertafter=dict( - type='str' + type='str', + aliases=['after'], ), insertbefore=dict( - type='str' + type='str', + aliases=['before'], ), marker_begin=dict( type='str', @@ -560,8 +572,8 @@ def main(): state=dict(arg_type='str', default='present', choices=['absent', 'present']), marker=dict(arg_type='str', default='# {mark} ANSIBLE MANAGED BLOCK', required=False), block=dict(arg_type='str', default='', aliases=['content'], required=False), - insertafter=dict(arg_type='str', required=False), - insertbefore=dict(arg_type='str', required=False), + insertafter=dict(arg_type='str', required=False, aliases=['after'],), + insertbefore=dict(arg_type='str', required=False, aliases=['before'],), marker_begin=dict(arg_type='str', default='BEGIN', required=False), marker_end=dict(arg_type='str', default='END', required=False), encoding=dict(arg_type='str', default='IBM-1047', required=False), @@ -572,7 +584,16 @@ def main(): 
mutually_exclusive=[['insertbefore', 'insertafter']], indentation=dict(arg_type='int', default=0, required=False) ) - result = dict(changed=False, cmd='', found=0) + result = dict( + changed=False, + cmd='', + found=0, + stdout='', + stdout_lines=[], + stderr='', + stderr_lines=[], + rc=0, + ) try: parser = better_arg_parser.BetterArgParser(arg_defs) parsed_args = parser.parse_args(module.params) @@ -665,16 +686,22 @@ def main(): # The triple double quotes is required for special characters (/_) been scape ret = json.loads("""{0}""".format(stdout)) except Exception: - messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) - if result.get('backup_name'): - messageDict['backup_name'] = result['backup_name'] - module.fail_json(**messageDict) + result.update( + dict( + msg="ZOAU dmod return content is NOT in json format", + stdout=str(stdout), + stdout_lines=stdout.splitlines(), + stderr=str(stderr), + stderr_lines=stderr.splitlines(), + rc=rc + ) + ) + module.fail_json(**result) result['cmd'] = ret['data']['commands'] result['changed'] = ret['data']['changed'] result['found'] = ret['data']['found'] - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' + # Only populate 'rc' if stderr is not empty to not fail the playbook run in a nomatch case if len(stderr): result['stderr'] = str(stderr) result['rc'] = rc diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index ec21fdf331..ed739e81a4 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -440,6 +440,18 @@ BACKUP_OPTIONS = [None, "SEQ", "MEM"] +expected_keys = [ + 'changed', + 'cmd', + 'found', + 'stdout', + 'stdout_lines', + 'stderr', + 'stderr_lines', + 'rc' +] + + def set_uss_environment(ansible_zos_module, 
content, file): hosts = ansible_zos_module hosts.all.file(path=file, state="touch") @@ -489,6 +501,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -512,6 +525,7 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -535,6 +549,7 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -558,6 +573,7 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -585,6 +601,7 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert 
result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX_CUSTOM @@ -613,6 +630,7 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX_CUSTOM @@ -640,6 +658,7 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM @@ -667,6 +686,7 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM @@ -689,6 +709,7 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -714,6 +735,7 @@ def 
test_uss_block_absent_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -737,6 +759,7 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER @@ -760,6 +783,7 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE @@ -783,6 +807,7 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_CUSTOM @@ -806,6 +831,7 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = 
hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_CUSTOM @@ -832,6 +858,7 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_REGEX_CUSTOM @@ -858,6 +885,7 @@ def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_REGEX_CUSTOM @@ -884,6 +912,7 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM @@ -910,6 +939,7 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM @@ -934,6 +964,7 @@ def 
test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION @@ -955,6 +986,7 @@ def test_uss_block_insert_with_doublequotes(ansible_zos_module): for result in results.contacted.values(): print(result) assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES @@ -980,6 +1012,7 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): for result in results.contacted.values(): backup_name = result.get("backup_name") assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert backup_name is not None results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): @@ -1008,6 +1041,7 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("backup_name") == uss_backup_file cmd_str = f"cat {uss_backup_file}" results = ansible_zos_module.all.shell(cmd=cmd_str) @@ -1045,6 +1079,7 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert 
result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -1071,6 +1106,7 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -1097,6 +1133,7 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -1123,6 +1160,7 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -1149,6 +1187,7 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER @@ -1175,6 +1214,7 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert 
result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE @@ -1201,6 +1241,7 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -1227,6 +1268,7 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -1252,6 +1294,7 @@ def test_ds_block_absent(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -1317,6 +1360,7 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == 
EXPECTED_INSERT_WITH_INDENTATION @@ -1349,6 +1393,7 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) backup_ds_name = result.get("backup_name") assert backup_ds_name is not None results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) @@ -1459,6 +1504,7 @@ def test_gdd_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) for result in results.contacted.values(): assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" @@ -1467,6 +1513,7 @@ def test_gdd_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) for result in results.contacted.values(): assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" @@ -1476,6 +1523,7 @@ def test_gdd_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params_w_bck) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("rc") == 0 backup = ds_name + "(0)" results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) @@ -1503,6 +1551,7 @@ def test_special_characters_ds_insert_block(ansible_zos_module): results = 
hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) src = ds_name.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): @@ -1513,6 +1562,7 @@ def test_special_characters_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params_w_bck) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("rc") == 0 backup = backup.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) @@ -1547,6 +1597,7 @@ def test_uss_encoding(ansible_zos_module, encoding): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING @@ -1568,6 +1619,7 @@ def test_special_characters_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) src = ds_name.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): @@ -1578,6 +1630,7 @@ def test_special_characters_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params_w_bck) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("rc") == 0 backup = backup.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) @@ -1626,6 +1679,7 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): results 
= hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF From 37634200a20f6a7417dd9705c779d07eb4bdfcbc Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Wed, 20 Aug 2025 11:30:11 -0600 Subject: [PATCH 19/73] [Enhancement][zos_tso_command][2.0] Update module interface (#2245) * Updated tests * Updated tests * Added changelog --- .../2245-zos_tso_command-interface-update.yml | 9 ++ plugins/modules/zos_tso_command.py | 9 +- .../modules/test_zos_tso_command_func.py | 126 +++++++++++++++--- 3 files changed, 121 insertions(+), 23 deletions(-) create mode 100644 changelogs/fragments/2245-zos_tso_command-interface-update.yml diff --git a/changelogs/fragments/2245-zos_tso_command-interface-update.yml b/changelogs/fragments/2245-zos_tso_command-interface-update.yml new file mode 100644 index 0000000000..334467c579 --- /dev/null +++ b/changelogs/fragments/2245-zos_tso_command-interface-update.yml @@ -0,0 +1,9 @@ +breaking_changes: + - zos_tso_command - Return value ``content`` is replaced by ``stdout_lines``. + Return value ``lines`` is replaced by ``line_count``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2245). + +minor_changes: + - zos_tso_command - New return value ``stdout`` contains the command stdout in raw format. + New return value ``stderr_lines`` that returns any outstanding stdout message split in lines. + (https://github.com/ansible-collections/ibm_zos_core/pull/2245). 
\ No newline at end of file diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 9a422a2f04..c50850acef 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -206,8 +206,7 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): The command result details. """ command_detail_json = [] - delete_on_close = True - tmp_file = NamedTemporaryFile(delete=delete_on_close) + tmp_file = NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(script) chmod(tmp_file.name, S_IEXEC | S_IREAD | S_IWRITE) @@ -216,9 +215,11 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): command_results = {} command_results["command"] = command command_results["rc"] = rc - command_results["content"] = stdout.split("\n") - command_results["lines"] = len(command_results.get("content", [])) + command_results["stdout"] = stdout + command_results["stdout_lines"] = stdout.split("\n") + command_results["line_count"] = len(command_results.get("stdout_lines", [])) command_results["stderr"] = stderr + command_results["stderr_lines"] = stderr.split("\n") if rc <= max_rc: command_results["failed"] = False diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index cacfd99c12..e2e62c2b23 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -23,6 +23,11 @@ def test_zos_tso_command_run_help(ansible_zos_module): for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == "help" assert result.get("changed") is True @@ -43,6 +48,11 @@ def test_zos_tso_command_long_command_128_chars(ansible_zos_module): for result in 
results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == command_string[0] assert result.get("changed") is True @@ -56,43 +66,79 @@ def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): results_allocate = hosts.all.zos_tso_command(commands=command_string) # Validate the correct allocation of dataset for result in results_allocate.contacted.values(): - for item in result.get("output"): + for index, item in enumerate(result.get("output")): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") == "" + assert item.get("command") == command_string[index] assert result.get("changed") is True # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command - results = hosts.all.zos_tso_command(commands=[f"LISTDS '{default_temp_dataset}'"]) + cmd = f"LISTDS '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Validate LISTDS using alias param 'command' - results = hosts.all.zos_tso_command(command=f"LISTDS '{default_temp_dataset}'") + cmd = f"LISTDS '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(command=cmd) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") 
!= "" + assert item.get("command") == cmd assert result.get("changed") is True # Validate LISTCAT command and an unauth command + cmd = f"LISTCAT ENT('{default_temp_dataset}')" results = hosts.all.zos_tso_command( - commands=[f"LISTCAT ENT('{default_temp_dataset}')"] + commands=[cmd] ) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Validate remove dataset - results = hosts.all.zos_tso_command(commands=[f"delete '{default_temp_dataset}'"]) + cmd = f"delete '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Expect the tso_command to fail here because # the previous command will have already deleted the data set # Validate data set was removed by previous call - results = hosts.all.zos_tso_command(commands=[f"delete '{default_temp_dataset}'"]) + cmd = f"delete '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 8 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is False + assert result.get("failed") is True # The failure test @@ -103,18 +149,31 @@ def test_zos_tso_command_empty_command(ansible_zos_module): for result in results.contacted.values(): for item in 
result.get("output"): assert item.get("rc") == 255 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == "" assert result.get("changed") is False + assert result.get("failed") is True # The failure test # The input command is no-existing command, the module return rc 255. def test_zos_tso_command_invalid_command(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["xxxxxx"]) + cmd = "xxxxxx" + results = hosts.all.zos_tso_command(commands=[]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 255 - assert result.get("changed") is False + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") == "" + assert item.get("command") == cmd + assert result.get("changed") is True + assert result.get("failed", False) is False # The positive test @@ -127,11 +186,13 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): commands_list = ["LU {0}".format(user), "LISTGRP"] results = hosts.all.zos_tso_command(commands=commands_list) for result in results.contacted.values(): - for item in result.get("output"): - if item.get("command") == "LU {0}".format(user): - assert item.get("rc") == 0 - if item.get("command") == "LISTGRP": - assert item.get("rc") == 0 + for index, item in enumerate(result.get("output")): + assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == commands_list[index] assert result.get("changed") is True @@ -139,13 +200,19 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): # The command that kicks off rc>0 which is allowed def test_zos_tso_command_maxrc(ansible_zos_module): hosts = 
ansible_zos_module + cmd = "LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC" results = hosts.all.zos_tso_command( - commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"], + commands=[cmd], max_rc=4 ) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") < 5 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True @@ -158,25 +225,46 @@ def test_zos_tso_command_gds(ansible_zos_module): hosts.all.shell(cmd="dtouch -tseq '{0}(+1)' ".format(default_data_set)) print(f"data set name {default_data_set}") hosts = ansible_zos_module + cmd = """LISTDSD DATASET('{0}(0)') ALL GENERIC""".format(default_data_set) results = hosts.all.zos_tso_command( - commands=["""LISTDSD DATASET('{0}(0)') ALL GENERIC""".format(default_data_set)], + commands=[cmd], max_rc=4 ) for result in results.contacted.values(): for item in result.get("output"): assert result.get("changed") is True + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + # command has to be different because the GDS name gets resolved + assert item.get("command") != cmd + cmd = """LISTDSD DATASET('{0}(-1)') ALL GENERIC""".format(default_data_set) results = hosts.all.zos_tso_command( - commands=["""LISTDSD DATASET('{0}(-1)') ALL GENERIC""".format(default_data_set)], + commands=[cmd], max_rc=4 ) for result in results.contacted.values(): for item in result.get("output"): assert result.get("changed") is True + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + # command has to be different because the GDS name gets resolved + assert item.get("command") != cmd + cmd = """LISTDS '{0}(-1)'""".format(default_data_set) results = 
hosts.all.zos_tso_command( - commands=["""LISTDS '{0}(-1)'""".format(default_data_set)] + commands=[cmd] ) for result in results.contacted.values(): + for item in result.get("output"): + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + # command has to be different because the GDS name gets resolved + assert item.get("command") != cmd assert result.get("changed") is True finally: - None - # hosts.all.shell(cmd="drm ANSIBLE.*".format(default_data_set)) \ No newline at end of file + hosts.all.shell(cmd=f"drm -f {default_data_set}") \ No newline at end of file From 66397da80170aa8dd3630cf1569a76024c29b718 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Wed, 20 Aug 2025 11:30:26 -0600 Subject: [PATCH 20/73] [Enhancement][zos_mount][2.0] Module interface update (#2244) * Updated interface * Updated interface * Updated zos_mount tests and docs * Added a deprecation warning * Fixed validation an pep8 issues * Added new changelog * Updated changelog --- .../2244-zos_mount-interface-update.yml | 4 + plugins/modules/zos_mount.py | 116 +++++++++++------- .../functional/modules/test_zos_mount_func.py | 16 +-- 3 files changed, 81 insertions(+), 55 deletions(-) create mode 100644 changelogs/fragments/2244-zos_mount-interface-update.yml diff --git a/changelogs/fragments/2244-zos_mount-interface-update.yml b/changelogs/fragments/2244-zos_mount-interface-update.yml new file mode 100644 index 0000000000..3cd516e7ad --- /dev/null +++ b/changelogs/fragments/2244-zos_mount-interface-update.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_mount - Option ``persistent.data_store`` is deprecated in favor of ``persistent.name``. + Option ``persistent.comment`` is deprecated in favor of ``persistent.marker``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2244). 
\ No newline at end of file diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index a35dda095a..9e14d88915 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -62,38 +62,38 @@ - > If I(state=mounted) and I(src) are not in use, the module will add the file system entry to the parmlib member - I(persistent/data_store) if not present. The I(path) will be + I(persistent/name) if not present. The I(path) will be updated, the device will be mounted and the module will complete successfully with I(changed=True). - > If I(state=mounted) and I(src) are in use, the module will add the file system entry to the parmlib member - I(persistent/data_store) if not present. The I(path) will not + I(persistent/name) if not present. The I(path) will not be updated, the device will not be mounted and the module will complete successfully with I(changed=False). - > If I(state=unmounted) and I(src) are in use, the module will B(not) add the file system entry to the parmlib member - I(persistent/data_store). The device will be unmounted and + I(persistent/name). The device will be unmounted and the module will complete successfully with I(changed=True). - > If I(state=unmounted) and I(src) are not in use, the module will B(not) add the file system entry to parmlib member - I(persistent/data_store).The device will remain unchanged and + I(persistent/name).The device will remain unchanged and the module will complete with I(changed=False). - > If I(state=present), the module will add the file system entry - to the provided parmlib member I(persistent/data_store) + to the provided parmlib member I(persistent/name) if not present. The module will complete successfully with I(changed=True). - > If I(state=absent), the module will remove the file system entry - to the provided parmlib member I(persistent/data_store) if + to the provided parmlib member I(persistent/name) if present. The module will complete successfully with I(changed=True). 
- > If I(state=remounted), the module will B(not) add the file - system entry to parmlib member I(persistent/data_store). The + system entry to parmlib member I(persistent/name). The device will be unmounted and mounted, the module will complete successfully with I(changed=True). type: str @@ -107,22 +107,23 @@ default: mounted persistent: description: - - Add or remove mount command entries to provided I(data_store) + - Add or remove mount command entries to provided I(name) required: False type: dict suboptions: - data_store: + name: description: - The data set name used for persisting a mount command. This is usually BPXPRMxx or a copy. required: True type: str + aliases: [ data_store ] backup: description: - Creates a backup file or backup data set for - I(data_store), including the timestamp information to + I(name), including the timestamp information to ensure that you retrieve the original parameters defined - in I(data_store). + in I(name). - I(backup_name) can be used to specify a backup file name if I(backup=true). - The backup file name will be returned on either success or @@ -135,7 +136,7 @@ description: - Specify the USS file name or data set name for the destination backup. - - If the source I(data_store) is a USS file or path, the + - If the source I(name) is a USS file or path, the I(backup_name) name can be relative or absolute for file or path name. - If the source is an MVS data set, the backup_name must be @@ -151,15 +152,16 @@ MVS backup data set recovery can be done by renaming it. required: false type: str - comment: + marker: description: - - If provided, this is used as a comment that surrounds the - command in the I(persistent/data_store) - - Comments are used to encapsulate the I(persistent/data_store) entry + - If provided, this is used as a marker that surrounds the + command in the I(persistent/name) + - Comments are used to encapsulate the I(persistent/name) entry such that they can easily be understood and located. 
type: list elements: str required: False + aliases: [ comment ] unmount_opts: description: - Describes how the unmount will be performed. @@ -356,8 +358,8 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + name: SYS1.PARMLIB(BPXPRMAA) + marker: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. zos_mount: @@ -366,10 +368,10 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) + name: SYS1.PARMLIB(BPXPRMAA) backup: true backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + marker: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -440,7 +442,7 @@ returned: always type: dict contains: - data_store: + name: description: The persistent store name where the mount was written to. returned: always type: str @@ -455,8 +457,8 @@ returned: always type: str sample: SYS1.FILESYS(PRMAABAK) - comment: - description: The text that was used in markers around the I(Persistent/data_store) entry. + marker: + description: The text that was used in markers around the I(Persistent/name) entry. 
returned: always type: list sample: @@ -740,7 +742,7 @@ def run_module(module, arg_def): persistent = parsed_args.get("persistent") backup = None backup_name = "" - comment = None + marker = None unmount_opts = parsed_args.get("unmount_opts") mount_opts = parsed_args.get("mount_opts") src_params = parsed_args.get("src_params") @@ -753,8 +755,8 @@ def run_module(module, arg_def): tmphlq = parsed_args.get("tmp_hlq") if persistent: - data_store = persistent.get("data_store").upper() - comment = persistent.get("comment") + name = persistent.get("name").upper() + marker = persistent.get("marker") backup = persistent.get("backup") if backup: if persistent.get("backup_name"): @@ -763,20 +765,20 @@ def run_module(module, arg_def): backup_code = None else: backup_code = backup_name - backup_name = mt_backupOper(module, data_store, backup_code, tmphlq) + backup_name = mt_backupOper(module, name, backup_code, tmphlq) res_args["backup_name"] = backup_name del persistent["backup"] if "mounted" in state or "present" in state: - persistent["addDataset"] = data_store + persistent["addDataset"] = name else: - persistent["delDataset"] = data_store - del persistent["data_store"] + persistent["delDataset"] = name + del persistent["name"] write_persistent = False if "mounted" in state or "present" in state or "absent" in state: if persistent: - if data_store: - if len(data_store) > 0: + if name: + if len(name) > 0: write_persistent = True will_mount = True @@ -805,7 +807,7 @@ def run_module(module, arg_def): automove_list=automove_list, cmd="not built", changed=changed, - comment=comment, + marker=marker, rc=0, stdout="", stderr="", @@ -869,10 +871,10 @@ def run_module(module, arg_def): parmtext = "" - if comment is not None: + if marker is not None: extra = "" ctr = 1 - for tabline in comment: + for tabline in marker: if len(extra) > 0: extra += " " extra += tabline.strip() @@ -1033,18 +1035,18 @@ def run_module(module, arg_def): stderr = "Mount called on data set that is already 
mounted.\n" if write_persistent and module.check_mode is False: - fst_du = data_set.DataSetUtils(data_store, tmphlq=tmphlq) + fst_du = data_set.DataSetUtils(name, tmphlq=tmphlq) fst_exists = fst_du.exists() if fst_exists is False: module.fail_json( - msg="Persistent data set ({0}) is either not cataloged or does not exist.".format(data_store), + msg="Persistent data set ({0}) is either not cataloged or does not exist.".format(name), stderr=str(res_args), ) bk_ds = datasets.tmp_name(high_level_qualifier=tmphlq) datasets.create(name=bk_ds, dataset_type="SEQ") - new_str = get_str_to_keep(dataset=data_store, src=src) + new_str = get_str_to_keep(dataset=name, src=src) rc_write = 0 @@ -1056,13 +1058,13 @@ def run_module(module, arg_def): except Exception as e: datasets.delete(dataset=bk_ds) module.fail_json( - msg="Unable to write on persistent data set {0}. {1}".format(data_store, e), + msg="Unable to write on persistent data set {0}. {1}".format(name, e), stderr=str(res_args), ) try: - datasets.delete(dataset=data_store) - datasets.copy(source=bk_ds, target=data_store) + datasets.delete(dataset=name) + datasets.copy(source=bk_ds, target=name) finally: datasets.delete(dataset=bk_ds) @@ -1072,7 +1074,7 @@ def run_module(module, arg_def): marker = '/* {mark} ANSIBLE MANAGED BLOCK ' + dtstr + " */" marker = "{0}\\n{1}\\n{2}".format("BEGIN", "END", marker) - datasets.blockinfile(dataset=data_store, state=True, block=parmtext, marker=marker, insert_after="EOF") + datasets.blockinfile(dataset=name, state=True, block=parmtext, marker=marker, insert_after="EOF") if rc == 0: if stdout is None: @@ -1091,7 +1093,7 @@ def run_module(module, arg_def): stderr=stderr, ) ) - del res_args["comment"] + del res_args["marker"] return res_args @@ -1130,13 +1132,33 @@ def main(): type="dict", required=False, options=dict( - data_store=dict( + name=dict( type="str", required=True, + aliases=["data_store"], + deprecated_aliases=[ + dict( + name='data_store', + version='3.0.0', # Version when 
it will be removed + collection_name='ibm.ibm_zos_core', + ) + ], ), backup=dict(type="bool", default=False), backup_name=dict(type="str", required=False, default=None), - comment=dict(type="list", elements="str", required=False), + marker=dict( + type="list", + elements="str", + required=False, + aliases=["comment"], + deprecated_aliases=[ + dict( + name='comment', + version='3.0.0', # Version when it will be removed + collection_name='ibm.ibm_zos_core', + ) + ], + ), ), ), unmount_opts=dict( @@ -1193,10 +1215,10 @@ def main(): arg_type="dict", required=False, options=dict( - data_store=dict(arg_type="str", required=True), + name=dict(arg_type="str", required=True, aliases=["data_store"]), backup=dict(arg_type="bool", default=False), backup_name=dict(arg_type="str", required=False, default=None), - comment=dict(arg_type="list", elements="str", required=False), + marker=dict(arg_type="list", elements="str", required=False, aliases=["comment"]), ), ), unmount_opts=dict( diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 532c2f213e..3cdf502e1e 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -187,7 +187,7 @@ def test_remount(ansible_zos_module, volumes_on_systems): hosts.all.file(path="/pythonx/", state="absent") -def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_systems): +def test_basic_mount_with_bpx_nomarker_nobackup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() @@ -231,7 +231,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ path="/pythonx", fs_type="zfs", state="mounted", - persistent=dict(data_store=dest_path), + persistent=dict(name=dest_path), ) for result in mount_result.values(): @@ -296,7 +296,7 @@ def 
test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on path="/pythonx", fs_type="zfs", state="mounted", - persistent=dict(data_store=dest_path), + persistent=dict(name=dest_path), ) for result in mount_result.values(): @@ -330,7 +330,7 @@ def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on stdin="", ) -def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_systems): +def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() @@ -391,10 +391,10 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst fs_type="zfs", state="mounted", persistent=dict( - data_store=dest_path, + name=dest_path, backup="Yes", backup_name=back_dest_path, - comment=["bpxtablecomment - try this", "second line of comment"], + marker=["bpxtablemarker - try this", "second line of marker"], ), ) # copying from dataset to make editable copy on target @@ -421,7 +421,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst assert result.get("changed") is True assert srcfn in data - assert "bpxtablecomment - try this" in data + assert "bpxtablemarker - try this" in data finally: hosts.all.zos_mount( src=srcfn, @@ -471,7 +471,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) fs_type="zfs", state="absent", tmp_hlq=tmphlq, - persistent=dict(data_store=persist_data_set, backup=True) + persistent=dict(name=persist_data_set, backup=True) ) hosts.all.shell( cmd="drm " + DataSet.escape_data_set_name(srcfn), From 1856512be82555db1c0114a3a87f446fbd4d0f1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 26 Aug 2025 15:21:54 -0500 Subject: [PATCH 21/73] [Enabler][2139]update_zos_apf_interface (#2250) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Test * Migrate zos apf * Debug print output * Complete migration * Fix typo * Fix typo * Fix stderr * Run specific case * Run specific case * Fix test * Add fragment * fix sanity * Fix sanity * Update changelogs/fragments/2250-Update_zos_apf_interface.yml Co-authored-by: Fernando Flores * Fix comments --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores --- .../2250-Update_zos_apf_interface.yml | 3 + plugins/modules/zos_apf.py | 64 +++++--- tests/functional/modules/test_zos_apf_func.py | 140 +++++++++++------- 3 files changed, 133 insertions(+), 74 deletions(-) create mode 100644 changelogs/fragments/2250-Update_zos_apf_interface.yml diff --git a/changelogs/fragments/2250-Update_zos_apf_interface.yml b/changelogs/fragments/2250-Update_zos_apf_interface.yml new file mode 100644 index 0000000000..c6acc832ba --- /dev/null +++ b/changelogs/fragments/2250-Update_zos_apf_interface.yml @@ -0,0 +1,3 @@ +breaking_changes: + - zos_apf - Option ``data_set_name`` is deprecated in favor of ``target``. New return values ``stdout_lines`` and ``stderr_lines`` are now returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/2250). \ No newline at end of file diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 211a1f4789..5f8ca037dd 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -102,17 +102,18 @@ type: str persistent: description: - - Add/remove persistent entries to or from I(data_set_name) + - Add/remove persistent entries to or from I(target) - C(library) will not be persisted or removed if C(persistent=None) required: False type: dict suboptions: - data_set_name: + target: description: - The data set name used for persisting or removing a C(library) from the APF list. required: True type: str + aliases: [data_set_name] marker: description: - The marker line template. 
@@ -127,10 +128,10 @@ default: "/* {mark} ANSIBLE MANAGED BLOCK */" backup: description: - - Creates a backup file or backup data set for I(data_set_name), + - Creates a backup file or backup data set for I(target), including the timestamp information to ensure that you retrieve the original APF list - defined in I(data_set_name)". + defined in I(target)". - I(backup_name) can be used to specify a backup file name if I(backup=true). - The backup file name will be return on either success or failure @@ -142,7 +143,7 @@ description: - Specify the USS file name or data set name for the destination backup. - - If the source I(data_set_name) is a USS file or path, the + - If the source I(target) is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name must be @@ -237,18 +238,18 @@ library: SOME.SEQUENTIAL.DATASET force_dynamic: true persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence zos_apf: state: absent library: SOME.SEQUENTIAL.DATASET volume: T12345 persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Batch libraries with custom marker, persistence for the APF list zos_apf: persistent: - data_set_name: "SOME.PARTITIONED.DATASET(MEM)" + target: "SOME.PARTITIONED.DATASET(MEM)" marker: "/* {mark} PROG001 USR0010 */" batch: - library: SOME.SEQ.DS1 @@ -283,12 +284,22 @@ check_format> DYNAMIC or STATIC" returned: always type: str +stdout_lines: + description: List of strings containing individual lines from STDOUT. + returned: always + type: list stderr: description: The error messages from ZOAU command apfadm returned: always type: str sample: "BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list." 
+stderr_lines: + description: List of strings containing individual lines from STDERR. + returned: always + type: list + sample: ["BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already + present in APF list."] rc: description: The return code from ZOAU command apfadm returned: always @@ -310,7 +321,7 @@ from ansible.module_utils._text import to_text from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, zoau_version_checker, data_set, backup as Backup) + better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -436,9 +447,16 @@ def main(): type='dict', required=False, options=dict( - data_set_name=dict( + target=dict( type='str', required=True, + aliases=["data_set_name"], + deprecated_aliases=[ + dict( + name='data_set_name', + version='2.0.0', + collection_name='ibm.ibm_zos_core') + ], ), marker=dict( type='str', @@ -503,7 +521,7 @@ def main(): arg_type='dict', required=False, options=dict( - data_set_name=dict(arg_type='str', required=True), + target=dict(arg_type='str', required=True, aliases=["data_set_name"]), marker=dict(arg_type='str', required=False, default='/* {mark} ANSIBLE MANAGED BLOCK */'), backup=dict(arg_type='bool', default=False), backup_name=dict(arg_type='str', required=False, default=None), @@ -549,7 +567,7 @@ def main(): tmphlq = module.params.get('tmp_hlq') if persistent: - data_set_name = persistent.get('data_set_name') + target = persistent.get('target') backup = persistent.get('backup') marker = persistent.get('marker') if len(marker) > 71: @@ -558,13 +576,13 @@ def main(): if persistent.get('backup_name'): backup = persistent.get('backup_name') del persistent['backup_name'] - result['backup_name'] = backupOper(module, data_set_name, backup, tmphlq) + result['backup_name'] = backupOper(module, target, backup, tmphlq) del 
persistent['backup'] if state == "present": - persistent['addDataset'] = data_set_name + persistent['addDataset'] = target else: - persistent['delDataset'] = data_set_name - del persistent['data_set_name'] + persistent['delDataset'] = target + del persistent['target'] if operation: ret = zsystem.apf(opt=operation) @@ -580,26 +598,22 @@ def main(): del item['library'] # ignore=true is added so that it's ignoring in case of addition if already present # ignore=true is added so that it's ignoring in case the file is not in apf list while deletion - if zoau_version_checker.is_zoau_version_higher_than("1.3.4"): - ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent, ignore=True) - else: - ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent) + ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent, ignore=True) else: if not library: module.fail_json(msg='library is required') # ignore=true is added so that it's ignoring in case of addition if already present # ignore=true is added so that it's ignoring in case the file is not in apf list while deletion - if zoau_version_checker.is_zoau_version_higher_than("1.3.4"): - ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent, ignore=True) - else: - ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent) + ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent, ignore=True) operOut = ret.stdout_response operErr = ret.stderr_response operRc = ret.rc result['stderr'] = operErr + result['stderr_lines'] = operErr.split("\n") result['rc'] = operRc result['stdout'] = operOut + result['stdout_lines'] = operOut.split("\n") if operation != 'list' and operRc == 0: if operErr.strip(): @@ -631,6 +645,7 @@ def main(): except re.error: module.exit_json(**result) 
result['stdout'] = ds_list + result['stdout_lines'] = ds_list.split("\n") else: """ ZOAU 1.3 changed the output from apf, having the data set list inside a new "data" tag. @@ -638,6 +653,7 @@ def main(): """ try: result['stdout'] = json.dumps(data.get("data")) + result['stdout_lines'] = json.dumps(data.get("data")).split("\n") except Exception as e: err_msg = "An exception occurred. See stderr for more details." module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr) diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 8333ce3625..b2c00bc499 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -14,7 +14,6 @@ from __future__ import absolute_import, division, print_function from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name from ibm_zos_core.tests.helpers.volumes import Volume_Handler -from ibm_zos_core.tests.helpers.version import get_zoau_version __metaclass__ = type @@ -48,7 +47,7 @@ def clean_test_env(hosts, test_info): cmd_str = f"drm '{test_info['library']}' " hosts.all.shell(cmd=cmd_str) if test_info.get('persistent'): - cmd_str = f"drm '{test_info['persistent']['data_set_name']}' " + cmd_str = f"drm '{test_info['persistent']['target']}' " hosts.all.shell(cmd=cmd_str) @@ -79,10 +78,14 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -103,7 
+106,7 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): "force_dynamic":True, "tmp_hlq":"", "persistent":{ - "data_set_name":"", + "target":"", "backup":True } } @@ -125,11 +128,15 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 assert result.get("backup_name")[:6] == tmphlq + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -167,11 +174,15 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -218,7 +229,7 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): "library":"", "volume":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */"}, "state":"present", "force_dynamic":True @@ -240,13 +251,13 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " 
hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 add_exptd = ADD_EXPECTED.format(test_info['library'], test_info['volume']) add_exptd = add_exptd.replace(" ", "") - cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + cmd_str = f"cat \"//'{test_info['persistent']['target']}'\" " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -256,8 +267,12 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] del_exptd = DEL_EXPECTED.replace(" ", "") - cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + cmd_str = f"cat \"//'{test_info['persistent']['target']}'\" " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -288,7 +303,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): } ], "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -307,10 +322,14 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] add_exptd = 
ADD_BATCH_EXPECTED.format( test_info['batch'][0]['library'], test_info['batch'][0]['volume'], @@ -320,7 +339,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): test_info['batch'][2]['volume'] ) add_exptd = add_exptd.replace(" ", "") - cmd_str = f"""dcat '{test_info["persistent"]["data_set_name"]}' """ + cmd_str = f"""dcat '{test_info["persistent"]["target"]}' """ results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -330,8 +349,12 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] del_exptd = DEL_EXPECTED.replace(" ", "") - cmd_str = f"""dcat '{test_info["persistent"]["data_set_name"]}' """ + cmd_str = f"""dcat '{test_info["persistent"]["target"]}' """ results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -340,7 +363,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): finally: for item in test_info['batch']: clean_test_env(hosts, item) - hosts.all.shell(cmd=f"drm '{test_info['persistent']['data_set_name']}' ") + hosts.all.shell(cmd=f"drm '{test_info['persistent']['target']}' ") def test_operation_list(ansible_zos_module): @@ -351,6 +374,11 @@ def test_operation_list(ansible_zos_module): } results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] list_json = result.get("stdout") data = json.loads(list_json) assert data['format'] in ['DYNAMIC', 'STATIC'] @@ -385,7 +413,7 @@ def 
test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds hosts.all.zos_apf(**test_info) ti = { "operation":"list", @@ -394,6 +422,11 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): ti['library'] = "ANSIBLE.*" results = hosts.all.zos_apf(**ti) for result in results.contacted.values(): + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] list_filtered = result.get("stdout") assert test_info['library'] in list_filtered test_info['state'] = 'absent' @@ -433,24 +466,22 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] # Second call to zos_apf, same as first but with different expectations results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # RC 0 should be allowed for ZOAU >= 1.3.4, - # in zoau < 1.3.4 -i is not recognized in apfadm - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - zoa_version = get_zoau_version(hosts) or "0.0.0.0" - rc = result.get("rc") - if zoa_version >= "1.3.4.0": - assert rc == 0 - elif zoa_version >= "1.2.0.0": - assert rc == 8 - else: - assert rc == 16 + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert 
result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None test_info['state'] = 'absent' hosts.all.zos_apf(**test_info) finally: @@ -484,21 +515,15 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # RC 0 should be allowed for ZOAU >= 1.3.4, - # in zoau < 1.3.4 -i is not recognized in apfadm - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - zoa_version = get_zoau_version(hosts) or "0.0.0.0" - rc = result.get("rc") - if zoa_version >= "1.3.4.0": - assert rc == 0 - elif zoa_version >= "1.2.0.0": - assert rc == 8 - else: - assert rc == 16 + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None finally: clean_test_env(hosts, test_info) @@ -513,7 +538,10 @@ def test_add_not_found(ansible_zos_module): test_info['library'] = f'{TEST_HLQ}.FOO.BAR' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None assert result.get("rc") == 16 or result.get("rc") == 8 @@ -546,11 +574,14 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['volume'] = 'T12345' 
results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None assert result.get("rc") == 16 or result.get("rc") == 8 finally: clean_test_env(hosts, test_info) @@ -564,7 +595,7 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): test_info = { "library":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -588,13 +619,17 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds - ds_name = test_info['persistent']['data_set_name'] + test_info['persistent']['target'] = prstds + ds_name = test_info['persistent']['target'] cmd_str =f"decho \"some text to test persistent data_set format validation.\" \"{ds_name}\"" hosts.all.shell(cmd=cmd_str) results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 8 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None finally: clean_test_env(hosts, test_info) @@ -607,7 +642,7 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): test_info = { "library":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -631,11 +666,15 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds 
test_info['persistent']['marker'] = "# Invalid marker format" results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 4 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None finally: clean_test_env(hosts, test_info) @@ -648,7 +687,7 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): test_info = { "library":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -672,10 +711,11 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): + assert result.get("failed") is True assert result.get("msg") == 'marker length may not exceed 72 characters' finally: clean_test_env(hosts, test_info) From d2a85fd7ec9d1c0ee4656d99a55c0142c1197a3f Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Tue, 26 Aug 2025 16:05:58 -0600 Subject: [PATCH 22/73] [Enhancement][2.0][zos_lineinfile] Update module interface (#2253) * Updated zos_lineinfile * Fixed tests * Added changelogs * Corrected changelog * Added return_content assignment to None * Fixed docs * [zos_lineinfile] Avoid leaving behind a temporary file when running test suite (#2258) * Added tests * Fixed lineinfile * removed print statements * Restored zos_data_set * Updated changelog * Updated changelogs --- .../2253-zos_lineinfile-interface-update.yml | 12 +++ plugins/modules/zos_lineinfile.py | 76 +++++++++++------ .../modules/test_zos_lineinfile_func.py | 85 +++++++++++-------- 3 files 
changed, 111 insertions(+), 62 deletions(-) create mode 100644 changelogs/fragments/2253-zos_lineinfile-interface-update.yml diff --git a/changelogs/fragments/2253-zos_lineinfile-interface-update.yml b/changelogs/fragments/2253-zos_lineinfile-interface-update.yml new file mode 100644 index 0000000000..b7131c96b7 --- /dev/null +++ b/changelogs/fragments/2253-zos_lineinfile-interface-update.yml @@ -0,0 +1,12 @@ +breaking_changes: + - zos_lineinfile - Removes return value ``return_content`` in favor of ``stdout``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2253). + +minor_changes: + - zos_lineinfile - Adds alias ``after`` for option ``insertafter`` and alias ``before`` for option ``insertbefore``. + Adds new return values ``stdout_lines`` and ``stderr_lines`` that returns any outstanding stdout or stderr message split in lines. + (https://github.com/ansible-collections/ibm_zos_core/pull/2253). + +trivial: + - test_zos_lineinfile_func.py - Added a proper cleanup of a temporary file. + (https://github.com/ansible-collections/ibm_zos_core/pull/2253). \ No newline at end of file diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index f5565ce9ee..c6539eb4f8 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -104,6 +104,7 @@ - Default is EOF required: false type: str + aliases: [ after ] insertbefore: description: - Used with C(state=present). @@ -121,6 +122,7 @@ - Choices are BOF or '*regex*' required: false type: str + aliases: [ before ] backup: description: - Creates a backup file or backup data set for I(src), including the @@ -291,11 +293,23 @@ returned: failure type: str sample: Parameter verification failed -return_content: +stdout: + description: The stdout from ZOAU dsed command. 
+ returned: always + type: str +stderr: description: The error messages from ZOAU dsed - returned: failure + returned: always type: str sample: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines: + description: List of strings containing individual lines from stdout. + returned: always + type: list +stderr_lines: + description: List of strings containing individual lines from stderr. + returned: always + type: list backup_name: description: Name of the backup file or data set that was created. returned: if backup=true @@ -604,9 +618,11 @@ def main(): line=dict(type='str'), insertafter=dict( type='str', + aliases=['after'] ), insertbefore=dict( type='str', + aliases=['before'] ), backrefs=dict(type='bool', default=False), backup=dict(type='bool', default=False), @@ -627,8 +643,8 @@ def main(): state=dict(arg_type="str", default='present', choices=['absent', 'present']), regexp=dict(arg_type="str", required=False), line=dict(arg_type="str", required=False), - insertafter=dict(arg_type="str", required=False), - insertbefore=dict(arg_type="str", required=False), + insertafter=dict(arg_type="str", required=False, aliases=['after']), + insertbefore=dict(arg_type="str", required=False, aliases=['before']), encoding=dict(arg_type="str", default="IBM-1047", required=False), backup=dict(arg_type="bool", default=False, required=False), backup_name=dict(arg_type="data_set_or_path", required=False, default=None), @@ -673,7 +689,24 @@ def main(): is_gds = False has_special_chars = False dmod_exec = False - return_content = "" + rc = 0 + stdout = '' + stderr = '' + cmd = '' + changed = False + return_content = None + + result = dict( + changed=False, + cmd='', + found=0, + stdout='', + stdout_lines=[], + stderr='', + stderr_lines=[], + rc=0, + backup_name='', + ) # analysis the file type if "/" not in src: @@ -717,27 +750,19 @@ def main(): # state=absent, delete lines with matching regex pattern if parsed_args.get('state') == 'present': if 
dmod_exec: - rc, cmd, stodut = execute_dsed(src, state=True, encoding=encoding, module=module, line=line, first_match=firstmatch, + rc, cmd, stdout = execute_dsed(src, state=True, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, backrefs=backrefs, regex=regexp, ins_bef=ins_bef, ins_aft=ins_aft) - result['rc'] = rc - result['cmd'] = cmd - result['stodut'] = stodut - result['return_content'] = stodut - result['changed'] = True if rc == 0 else False stderr = 'Failed to insert new entry' if rc != 0 else "" + changed = True if rc == 0 else False else: return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, backrefs, force) else: if dmod_exec: - rc, cmd, stodut = execute_dsed(src, state=False, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, + rc, cmd, stdout = execute_dsed(src, state=False, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, backrefs=backrefs, regex=regexp, ins_bef=ins_bef, ins_aft=ins_aft) - result['rc'] = rc - result['cmd'] = cmd - result['stodut'] = stodut - result['return_content'] = stodut - result['changed'] = True if rc == 0 else False stderr = 'Failed to insert new entry' if rc != 0 else "" + changed = True if rc == 0 else False else: return_content = absent(src, quotedString(line), quotedString(regexp), encoding, force) if not dmod_exec: @@ -790,17 +815,16 @@ def main(): if 'cmd' in ret: ret['cmd'] = ret['cmd'].replace('\\"', '"').replace('\\\\', '\\') result['cmd'] = ret['cmd'] - result['changed'] = ret.get('changed', False) + changed = ret.get('changed', False) result['found'] = ret.get('found', 0) result['stdout'] = stdout - result['return_content'] = stdout - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' - if len(stderr): - result['stderr'] = str(stderr) - 
result['rc'] = rc - if 'backup_name' not in result: - result['backup_name'] = "" + result['changed'] = changed + result['rc'] = rc + result['cmd'] = cmd + result['stdout'] = str(stdout) + result['stderr'] = str(stderr) + result['stdout_lines'] = result['stdout'].splitlines() + result['stderr_lines'] = result['stderr'].splitlines() module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 703a1c77ac..aaecb3c306 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2020, 2024 +# Copyright (c) IBM Corporation 2020, 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -27,6 +27,17 @@ __metaclass__ = type +expected_keys = [ + 'changed', + 'cmd', + 'found', + 'stdout', + 'stdout_lines', + 'stderr', + 'stderr_lines', + 'rc' +] + c_pgm="""#include #include #include @@ -259,7 +270,7 @@ def set_uss_environment(ansible_zos_module, content, file): def remove_uss_environment(ansible_zos_module, file): hosts = ansible_zos_module - hosts.all.shell(cmd="rm " + file) + hosts.all.shell(cmd=f"rm '{file}'") def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module @@ -310,7 +321,7 @@ def test_uss_line_replace(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE @@ -334,7 +345,7 @@ def test_uss_line_insertafter_regex(ansible_zos_module): results = 
hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -358,7 +369,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -382,7 +393,7 @@ def test_uss_line_insertafter_eof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -406,7 +417,7 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -431,7 +442,7 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in 
expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE @@ -456,7 +467,7 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE @@ -481,7 +492,7 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER @@ -506,7 +517,7 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE @@ -531,7 +542,7 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for 
result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH @@ -556,7 +567,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH @@ -581,7 +592,7 @@ def test_uss_line_absent(ansible_zos_module): for result in results.contacted.values(): print(result) assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -604,7 +615,7 @@ def test_uss_advanced_regular_expression_absent(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -629,7 +640,7 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED @@ -654,7 +665,7 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): results = 
hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED @@ -677,7 +688,7 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -716,7 +727,7 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -746,7 +757,7 @@ def test_ds_line_insert_before_ansible_block(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_TEST_PARSING_CONTENT @@ -772,7 +783,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert 
all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -799,7 +810,7 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -825,7 +836,7 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -853,7 +864,7 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE @@ -881,7 +892,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" 
".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE @@ -904,7 +915,7 @@ def test_gds_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) cmd = result.get("cmd").split() for cmd_p in cmd: if ds_name in cmd_p: @@ -917,7 +928,7 @@ def test_gds_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) cmd = result.get("cmd").split() for cmd_p in cmd: if ds_name in cmd_p: @@ -932,7 +943,7 @@ def test_gds_ds_insert_line(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") == 1 assert result.get("rc") == 0 - assert "return_content" in result + assert all(key in result for key in expected_keys) backup = ds_name + "(0)" results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) for result in results.contacted.values(): @@ -942,7 +953,7 @@ def test_gds_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) finally: hosts.all.shell(cmd="""drm "{0}*" """.format(ds_name)) @@ -961,7 +972,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) src = ds_name.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in 
results.contacted.values(): @@ -974,7 +985,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): print(result) assert result.get("changed") == 1 assert result.get("rc") == 0 - assert "return_content" in result + assert all(key in result for key in expected_keys) backup = backup.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) for result in results.contacted.values(): @@ -1100,7 +1111,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -1139,7 +1150,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): params["path"] = ds_full_name results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): - assert "return_content" in result + assert all(key in result for key in expected_keys) for key in kwargs: assert kwargs.get(key) in result.get(key) finally: @@ -1326,6 +1337,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -1368,7 +1380,7 @@ def test_uss_encoding(ansible_zos_module, encoding): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {encoding} 
{full_path}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING @@ -1413,13 +1425,14 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) hosts.all.shell( cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" " ) results = hosts.all.shell(cmd=f"cat \"//'{ds_full_name}'\" ") for result in results.contacted.values(): - assert result.get("stdout") == EXPECTED_ENCODING finally: remove_ds_environment(ansible_zos_module, ds_name) + # ds_full_name gets converted to a file too + remove_uss_environment(ansible_zos_module, ds_full_name) From b6592c50b214353e18c6188dafa765815d51741c Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 28 Aug 2025 21:27:16 +0530 Subject: [PATCH 23/73] Adding features in display command and updating testcases --- plugins/module_utils/better_arg_parser.py | 27 +++ plugins/modules/zos_started_task.py | 206 ++++++++++++++---- .../modules/test_zos_started_task_func.py | 153 ++++++++++--- 3 files changed, 322 insertions(+), 64 deletions(-) diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index d49cb46458..a42301ede4 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -152,6 +152,7 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): # TODO: determine if we should optionally allow top-level args to be passed self.type_handlers = { "dict": self._dict_type, + "basic_dict": self._basic_dict_type, "list": self._list_type, "str": self._str_type, "bool": self._bool_type, @@ -254,6 +255,32 @@ def _dict_type(self, contents, resolved_dependencies): self._assert_mutually_exclusive(contents) return 
contents + def _basic_dict_type(self, contents, resolve_dependencies): + """Resolver for basic dict type arguments. + + Parameters + ---------- + contents : dict + The contents of the argument. + resolve_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + dict + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + if not isinstance(contents, dict): + raise ValueError('Invalid argument "{0}" for type "dict".'.format(contents)) + return contents + def _str_type(self, contents, resolve_dependencies): """Resolver for str type arguments. diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 988ba937ac..9b1616b8c8 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -29,7 +29,7 @@ options: asid: description: - - I(asid) is a unique address space identifier which gets assigned to each running task. + - I(asid) is a unique address space identifier which gets assigned to each running started task. required: false type: str device_type: @@ -124,6 +124,12 @@ - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. required: false type: str + verbose: + description: + - Return System logs that describe the task's execution. + required: false + type: bool + default: false """ EXAMPLES = r""" - name: Start a started task using member name. 
@@ -138,7 +144,7 @@ from ansible.module_utils.basic import AnsibleModule import traceback - +import re from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser ) @@ -150,7 +156,7 @@ ) try: - from zoautil_py import opercmd + from zoautil_py import opercmd,zsystem except ImportError: zoau_exceptions = ZOAUImportError(traceback.format_exc()) @@ -160,7 +166,7 @@ # zoau_exceptions = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): +def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=1, *args, **kwargs): """Execute operator command. Parameters @@ -179,14 +185,29 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): OperatorQueryResult The result of the command. """ + task_params = {} # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s + if execute_display_before: + task_params = execute_display_command(started_task_name, timeout_c) + response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) + if execute_display_after: + task_params = execute_display_command(started_task_name, timeout_c) + rc = response.rc stdout = response.stdout_response stderr = response.stderr_response - return rc, stdout, stderr + return rc, stdout, stderr, task_params + +def execute_display_command(started_task_name, timeout_c): + cmd = "d a,"+started_task_name + display_response = opercmd.execute(cmd, timeout_c) + task_params = [] + if display_response.rc == 0 and display_response.stderr_response == "": + task_params = extract_keys(display_response.stdout_response) + return task_params def prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters): @@ -206,6 +227,53 @@ def prepare_start_command(member, identifier, job_name, job_account, device, vol return cmd +def extract_keys(stdout): 
+ # keys = {'A': 'ASID', 'CT': 'CPU_Time', 'ET': 'Elapsed_Time', 'WUID': 'WUID', 'USERID': 'USERID', 'P': 'Priority'} + # params = {} + # for key in keys: + # parm = re.search(rf"{key}=([^\s]+)", stdout) + # if parm: + # params[keys[key]] = parm.group(1) + # return params + lines = stdout.strip().split('\n') + tasks = [] + current_task = None + task_header_regex = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)') + kv_pattern = re.compile(r'(\S+)=(\S+)') + for line in lines[5:]: + line = line.strip() + if len(line.split()) >= 5 and task_header_regex.search(line): + if current_task: + tasks.append(current_task) + match = task_header_regex.search(line) + current_task = { + "TASK_NAME": match.group(1), + "DETAILS": {} + } + for match in kv_pattern.finditer(line): + key, value = match.groups() + current_task["DETAILS"][key] = value + elif current_task: + for match in kv_pattern.finditer(line): + key, value = match.groups() + current_task["DETAILS"][key] = value + if current_task: + tasks.append(current_task) + return tasks + + +def fetch_logs(command): + stdout = zsystem.read_console(options='-t1') + stdout_lines = stdout.splitlines() + first = None + for i, line in enumerate(stdout_lines): + if command in line: + if first is None: + first = i + if first is None: + return "" + return stdout_lines[first:] + def run_module(): """Initialize the module. 
@@ -216,10 +284,10 @@ def run_module(): """ module = AnsibleModule( argument_spec={ - 'operation': { + 'state': { 'type': 'str', 'required': True, - 'choices': ['start', 'stop', 'modify', 'display', 'force', 'cancel'] + 'choices': ['started', 'stopped', 'modified', 'display', 'forced', 'cancelled'] }, 'member_name': { 'type': 'str', @@ -266,25 +334,28 @@ def run_module(): 'required': False }, 'keyword_parameters': { - 'type': 'str', + 'type': 'dict', 'required': False, 'no_log': False }, 'asid': { 'type': 'str', 'required': False + }, + 'verbose': { + 'type': 'bool', + 'required': False } }, mutually_exclusive=[ - ['job_name', 'identifier_name'], ['device_number', 'device_type'] ], supports_check_mode=True ) args_def = { - 'operation': { - 'type': 'str', + 'state': { + 'arg_type': 'str', 'required': True }, 'member_name': { @@ -331,12 +402,16 @@ def run_module(): 'required': False }, 'keyword_parameters': { - 'arg_type': 'str', + 'arg_type': 'basic_dict', 'required': False }, 'asid': { 'arg_type': 'str', 'required': False + }, + 'verbose': { + 'arg_type': 'bool', + 'required': False } } @@ -349,7 +424,7 @@ def run_module(): msg='Parameter verification failed.', stderr=str(err) ) - operation = module.params.get('operation') + operation = module.params.get('state') member = module.params.get('member_name') identifier = module.params.get('identifier') job_name = module.params.get('job_name') @@ -362,8 +437,19 @@ def run_module(): subsystem_name = module.params.get('subsystem_name') reus_asid = module.params.get('reus_asid') keyword_parameters = module.params.get('keyword_parameters') + verbose = module.params.get('verbose') + keyword_parameters_string = None + if keyword_parameters is not None: + keyword_parameters_string = ','.join(f"{key}={value}" for key, value in keyword_parameters.items()) device = device_type if device_type is not None else device_number kwargs = {} + start_errmsg = ['ERROR'] + stop_errmsg = ['NOT ACTIVE'] + display_errmsg = ['NOT ACTIVE'] + 
modify_errmsg = ['REJECTED', 'NOT ACTIVE'] + cancel_errmsg = ['NOT ACTIVE'] + force_errmsg = ['NOT ACTIVE'] + err_msg = [] # Validations if job_account and len(job_account) > 55: @@ -378,7 +464,7 @@ def run_module(): msg="Invalid device_number.", changed=False ) - if subsystem_name and len(job_account) > 4: + if subsystem_name and len(subsystem_name) > 4: module.fail_json( msg="The subsystem_name must be 1 - 4 characters.", changed=False @@ -397,7 +483,20 @@ def run_module(): args = [] cmd = '' started_task_name = "" - if operation != 'start': + if operation != 'started': + if job_name is not None: + started_task_name = job_name + if identifier is not None: + started_task_name = started_task_name + "." + identifier + else: + module.fail_json( + msg="job_name is missing which is mandatory.", + changed=False + ) + execute_display_before = False + execute_display_after = False + if operation == 'started': + execute_display_after = True if job_name is not None: started_task_name = job_name elif member is not None: @@ -406,61 +505,94 @@ def run_module(): started_task_name = started_task_name + "." 
+ identifier else: module.fail_json( - msg="one of job_name, member_name or identifier is needed but all are missing.", + msg="member_name is missing which is mandatory.", changed=False ) - if operation == 'start': - # member name is mandatory - if member is None or member.strip() == "": + err_msg = start_errmsg + if member is None: module.fail_json( msg="member_name is missing which is mandatory.", changed=False ) - cmd = prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters) + if job_name is not None and identifier is not None: + module.fail_json( + msg="job_name and identifier_name are mutually exclusive while starting a started task.", + changed=False + ) + cmd = prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters_string) elif operation == 'display': + err_msg = display_errmsg cmd = 'd a,' + started_task_name - elif operation == 'stop': + elif operation == 'stopped': + execute_display_before = True + err_msg = stop_errmsg cmd = 'p ' + started_task_name - elif operation == 'cancel': + if asid: + cmd = cmd + ',a=' + asid + elif operation == 'cancelled': + execute_display_before = True + err_msg = cancel_errmsg cmd = 'c ' + started_task_name if asid: cmd = cmd + ',a=' + asid - elif operation == 'force': + elif operation == 'forced': + execute_display_before = True + err_msg = force_errmsg cmd = 'force ' + started_task_name if asid: cmd = cmd + ',a=' + asid - elif operation == 'modify': + elif operation == 'modified': + execute_display_after = True + err_msg = modify_errmsg cmd = 'f ' + started_task_name + ',' + parameters changed = False stdout = "" stderr = "" - rc, out, err = execute_command(cmd, timeout_s=wait_s, *args, **kwargs) - if "ERROR" in out or err != "": + rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, 
timeout_s=wait_s, *args, **kwargs) + logs = fetch_logs(cmd.upper()) # it will display both start/display logs + logs_str = "\n".join(logs) + if any(msg in out for msg in err_msg) or any(msg in logs_str for msg in err_msg) or err != "": changed = False stdout = out stderr = err if err == "" or err is None: stderr = out + stdout = "" else: changed = True stdout = out stderr = err + if operation == 'display': + task_params = extract_keys(out) result = dict() if module.check_mode: module.exit_json(**result) - - result = dict( - changed=changed, - cmd=cmd, - remote_cmd=cmd, - rc=rc, - stdout=stdout, - stderr=stderr, - stdout_lines=stdout.split('\n'), - stderr_lines=stderr.split('\n'), - ) + + if verbose: + result = dict( + changed=changed, + cmd=cmd, + task=task_params, + rc=rc, + verbose_output=logs_str, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) + else: + result = dict( + changed=changed, + cmd=cmd, + task=task_params, + rc=rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 5529ff4f62..486ca72a0e 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -29,14 +29,20 @@ //STDPARM DD * SH sleep 600 /*""" +PROC_JCL_CONTENT="""//TEST PROC TIME=6 +//STEP1 EXEC PGM=BPXBATCH +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//STDPARM DD *,SYMBOLS=EXECSYS +SH sleep &TIME +/*""" def test_start_task_with_invalid_member(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - operation="start", + state="started", member="SAMPLETASK" - ) - + ) for result in start_results.contacted.values(): print(result) assert result.get("changed") is False @@ -46,10 +52,10 @@ def 
test_start_task_with_invalid_member(ansible_zos_module): def test_start_task_with_invalid_identifier(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - operation="start", + state="started", member="SAMPLE", identifier="$HELLO" - ) + ) for result in start_results.contacted.values(): print(result) @@ -61,10 +67,10 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): hosts = ansible_zos_module job_account = "(T043JM,JM00,1,0,0,This is the invalid job account information to test negative scenario)" start_results = hosts.all.zos_started_task( - operation="start", + state="started", member="SAMPLE", job_account=job_account - ) + ) for result in start_results.contacted.values(): print(result) @@ -75,10 +81,10 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): def test_start_task_with_invalid_devicenum(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - operation="start", + state="started", member="SAMPLE", device_number="0870" - ) + ) for result in start_results.contacted.values(): print(result) @@ -90,6 +96,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() + print(data_set_name) temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( @@ -101,12 +108,12 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) hosts.all.shell( - cmd="dcp {0} \"//'{1}(SAMPLE)'\"".format(data_set_name, PROC_PDS) + cmd="dcp {0} '{1}(SAMPLE)'".format(data_set_name, PROC_PDS) ) start_results = hosts.all.zos_started_task( - operation="start", - member="SAMPLE" + state="started", + member="SAMPLE" ) for result in start_results.contacted.values(): @@ -116,8 +123,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( - 
operation="cancel", - task_name="SAMPLE" + state="cancelled", + task_name="SAMPLE" ) for result in stop_results.contacted.values(): @@ -125,10 +132,12 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" - + + job_account = "(T043JM,JM00,1,0,0,)" start_results = hosts.all.zos_started_task( - operation="start", - member="SAMPLE" + state="started", + member="SAMPLE", + job_account=job_account ) for result in start_results.contacted.values(): @@ -138,8 +147,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("stderr") == "" display_result = hosts.all.zos_started_task( - operation="display", - task_name="SAMPLE" + state="display", + task_name="SAMPLE" ) for result in display_result.contacted.values(): print(result) @@ -150,9 +159,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) stop_results = hosts.all.zos_started_task( - operation="cancel", - task_name="SAMPLE", - asid=asid_val + state="cancelled", + task_name="SAMPLE", + asid=asid_val ) for result in stop_results.contacted.values(): @@ -190,9 +199,99 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): ) start_results = hosts.all.zos_started_task( - operation="start", - member="SAMPLE", - job_name="TESTTSK" + state="started", + member="SAMPLE", + job_name="TESTTSK" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + stop_results = hosts.all.zos_started_task( + state="cancelled", + task_name="TESTTSK" + ) + + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + finally: + hosts.all.file(path=temp_path, 
state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + ) + +def test_stop_and_modify_with_vlf_task(ansible_zos_module): + hosts = ansible_zos_module + + stop_results = hosts.all.zos_started_task( + state="stopped", + task_name="vlf" + ) + + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + start_results = hosts.all.zos_started_task( + state="started", + member="vlf", + subsystem_name="mstr" + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + modify_results = hosts.all.zos_started_task( + state="modified", + task_name="vlf", + parameters="replace,nn=00" + ) + + for result in modify_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + +def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(PROC_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(SAMPLE)'\"".format(data_set_name, PROC_PDS) + ) + + start_results = hosts.all.zos_started_task( + state="started", + member="SAMPLE", + job_name="SPROC" ) for result in start_results.contacted.values(): @@ -202,8 +301,8 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( - operation="cancel", - task_name="TESTTSK" + 
state="cancelled", + task_name="SPROC" ) for result in stop_results.contacted.values(): From 84fce8aea51a1a7d9cf4b016b457d92454a4a5e1 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 29 Aug 2025 10:04:48 -0600 Subject: [PATCH 24/73] [Enabler] Remove dependencies from tests when testing zos_data_set (#2259) * Removed zos_data_set from test_zos_volume_init * Removed zos_data_set from zos_job_output tests * Fixed volume init test * Removed commented lines * Removed commented lines * Removed zos_data_set calls from zos_mount tests * Commented zos_data_set from zos_backup_resore * Removed zos_data_set from zos_mount and zos_backup_restore test cases * Removed zos_data_set from zos_lineinfile tests * Updated path msg * modified blockinfile tests * Updated blockinfile * Fixed copy changing force to replace * Added changelog --- .../2259-reduce-test-dep-zos_data_set.yml | 13 ++ plugins/action/zos_script.py | 2 +- plugins/action/zos_unarchive.py | 2 +- .../modules/test_zos_backup_restore.py | 18 +-- .../modules/test_zos_blockinfile_func.py | 120 ++++++++++-------- .../modules/test_zos_job_output_func.py | 11 +- .../modules/test_zos_job_query_func.py | 24 ++-- .../modules/test_zos_lineinfile_func.py | 62 +++------ .../functional/modules/test_zos_mount_func.py | 42 +----- .../modules/test_zos_volume_init_func.py | 6 +- 10 files changed, 136 insertions(+), 164 deletions(-) create mode 100644 changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml diff --git a/changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml b/changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml new file mode 100644 index 0000000000..243ece7ba2 --- /dev/null +++ b/changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml @@ -0,0 +1,13 @@ +trivial: + - test_zos_backup_restore.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). 
+ - test_zos_blockinfile_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_job_output_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_job_query_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_mount_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_volume_init_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index d3d348fa9c..de4c5a9158 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -92,7 +92,7 @@ def run(self, tmp=None, task_vars=None): copy_module_args = dict( src=script_path, dest=tempfile_path, - force=True, + replace=True, is_binary=False, encoding=module_args.get('encoding'), use_template=module_args.get('use_template', False), diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 9f5d9d347f..a6670ea7b3 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -100,7 +100,7 @@ def run(self, tmp=None, task_vars=None): src=source, dest=dest, dest_data_set=dest_data_set, - force=force, + replace=force, is_binary=True, ) ) diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 9911b86a92..ecaf87c871 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ 
b/tests/functional/modules/test_zos_backup_restore.py @@ -72,9 +72,9 @@ def create_sequential_data_set_with_contents( hosts, data_set_name, contents, volume=None ): if volume is not None: - results = hosts.all.zos_data_set(name=data_set_name, type="seq", volumes=volume) + results = hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{data_set_name}'") else: - results = hosts.all.zos_data_set(name=data_set_name, type="seq") + results = hosts.all.shell(cmd=f"dtouch -tseq '{data_set_name}'") assert_module_did_not_fail(results) results = hosts.all.shell("decho '{0}' {1}".format(contents, data_set_name)) assert_module_did_not_fail(results) @@ -93,7 +93,7 @@ def delete_data_set_or_file(hosts, name): def delete_data_set(hosts, data_set_name): - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{data_set_name}'") def delete_file(hosts, path): @@ -899,15 +899,15 @@ def test_backup_gds(ansible_zos_module, dstype): # We need to replace hyphens because of NAZARE-10614: dzip fails archiving data set names with '-' data_set_name = get_tmp_ds_name(symbols=True).replace("-", "") backup_dest = get_tmp_ds_name(symbols=True).replace("-", "") - results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is 
True assert result.get("module_stderr") is None @@ -935,15 +935,15 @@ def test_backup_into_gds(ansible_zos_module, dstype): # We need to replace hyphens because of NAZARE-10614: dzip fails archiving data set names with '-' data_set_name = get_tmp_ds_name(symbols=True).replace("-", "") ds_name = get_tmp_ds_name(symbols=True).replace("-", "") - results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=ds_name, state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{ds_name}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index ed739e81a4..146af86b95 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -464,10 +464,12 @@ def remove_uss_environment(ansible_zos_module, file): def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set(name=ds_name, type=ds_type) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") + # hosts.all.zos_data_set(name=ds_name, type=ds_type) if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", 
type="member") + hosts.all.shell(cmd=f"decho '' '{ds_full_name}'") + # hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name @@ -478,7 +480,8 @@ def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) def remove_ds_environment(ansible_zos_module, ds_name): hosts = ansible_zos_module - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") + # hosts.all.zos_data_set(name=ds_name, state="absent") ######################### # USS test cases @@ -1322,7 +1325,8 @@ def test_ds_tmp_hlq_option(ansible_zos_module): try: ds_full_name = get_tmp_ds_name() temp_file = get_random_file_name(dir=TMP_DIRECTORY) - hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_full_name}'") + # hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " hosts.all.shell(cmd=cmd_str) @@ -1336,7 +1340,8 @@ def test_ds_tmp_hlq_option(ansible_zos_module): for key in kwargs: assert kwargs.get(key) in result.get(key) finally: - hosts.all.zos_data_set(name=ds_full_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_full_name}'") + # hosts.all.zos_data_set(name=ds_full_name, state="absent") hosts.all.file(name=temp_file, state="absent") @@ -1410,7 +1415,8 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup finally: remove_ds_environment(ansible_zos_module, ds_name) if backup_ds_name != "": - ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{backup_ds_name}'") + # ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") @@ -1435,24 +1441,28 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): else: params["path"] = 
f"{default_data_set_name}({member_2})" try: + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + # hosts.all.zos_data_set( + # name=default_data_set_name, + # state="present", + # type=ds_type, + # replace=True + # ) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { - "name": f"{default_data_set_name}({member_1})", - "type": "member", - "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, - ] - ) + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") + # hosts.all.zos_data_set( + # batch=[ + # { + # "name": f"{default_data_set_name}({member_1})", + # "type": "member", + # "state": "present", "replace": True, }, + # { "name": params["path"], "type": "member", + # "state": "present", "replace": True, }, + # ] + # ) # write memeber to verify cases if ds_type in ["pds", "pdse"]: cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) @@ -1486,7 +1496,8 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}'") + # hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds @@ -1545,7 +1556,8 @@ def test_special_characters_ds_insert_block(ansible_zos_module): ds_name = get_tmp_ds_name(5, 5, symbols=True) backup = get_tmp_ds_name(6, 6, symbols=True) try: - result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + hosts.all.shell(cmd=f"dtouch -tseq 
'{ds_name}'") + # result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") params["src"] = ds_name results = hosts.all.zos_blockinfile(**params) @@ -1613,7 +1625,8 @@ def test_special_characters_ds_insert_block(ansible_zos_module): ds_name = get_tmp_ds_name(5, 5, symbols=True) backup = get_tmp_ds_name(6, 6, symbols=True) try: - result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + hosts.all.shell(cmd=f"dtouch -tseq '{ds_name}'") + # result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") params["src"] = ds_name results = hosts.all.zos_blockinfile(**params) @@ -1715,7 +1728,8 @@ def test_ds_not_supported(ansible_zos_module, dstype): ds_name = get_tmp_ds_name() try: ds_name = ds_name.upper() + "." + ds_type - results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + results = hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") + # results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') for result in results.contacted.values(): assert result.get("changed") is True params["path"] = ds_name @@ -1724,7 +1738,8 @@ def test_ds_not_supported(ansible_zos_module, dstype): assert result.get("changed") is False assert result.get("msg") == "VSAM data set type is NOT supported" finally: - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") + # hosts.all.zos_data_set(name=ds_name, state="absent") # Enhancemed #1339 @@ -1746,30 +1761,34 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + # hosts.all.zos_data_set( + # name=default_data_set_name, + # state="present", + # type=ds_type, + # replace=True + # ) hosts.all.shell(cmd=f"echo \"{content}\" 
> {temp_file}") - hosts.all.zos_data_set( - batch=[ - { - "name": f"{default_data_set_name}({member_1})", - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": params["path"], - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") + # hosts.all.zos_data_set( + # batch=[ + # { + # "name": f"{default_data_set_name}({member_1})", + # "type": "member", + # "state": "present", + # "replace": True, + # }, + # { + # "name": params["path"], + # "type": "member", + # "state": "present", + # "replace": True, + # }, + # ] + # ) cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file) ,params["path"]) hosts.all.shell(cmd=cmd_str) results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) @@ -1796,4 +1815,5 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}'") + # hosts.all.zos_data_set(name=default_data_set_name, state="absent") diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 7ea1a06d5f..4d4c82e227 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -329,24 +329,17 @@ def test_zos_job_output_job_exists_with_sysin(ansible_zos_module): hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") data_set_name = get_tmp_ds_name() - result = hosts.all.zos_data_set( - name=data_set_name, - type="seq", - state="present" - ) - print(f"job_submit_result: {result.contacted.values()}") + 
hosts.all.shell(cmd=f"dtouch -tseq '{data_set_name}'") hosts.all.shell( cmd=f"echo {quote(JCL_FILE_CONTENTS_SYSIN.format(data_set_name))} > {TEMP_PATH}/SYSIN" ) result = hosts.all.zos_job_submit( src=f"{TEMP_PATH}/SYSIN", remote_src=True, volume=None ) - print(f"job_submit_result: {result.contacted.values()}") hosts.all.file(path=TEMP_PATH, state="absent") sysin = True results = hosts.all.zos_job_output(job_name="SYSINS", input=sysin) for result in results.contacted.values(): - print(f"job_output_result: {result}") assert result.get("changed") is True for job in result.get("jobs"): assert len(job.get("dds")) >= 1 @@ -357,7 +350,7 @@ def test_zos_job_output_job_exists_with_sysin(ansible_zos_module): break assert sysin_found finally: - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{data_set_name}'") hosts.all.file(path=TEMP_PATH, state="absent") diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index b9160d7d2f..f21cd2c464 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -98,20 +98,18 @@ def test_zos_job_query_func(ansible_zos_module): def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module - jdata_set_name = get_tmp_ds_name() + data_set_name = get_tmp_ds_name() temp_path = get_random_file_name(dir=TEMP_PATH) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {temp_path}/SAMPLE" ) - hosts.all.zos_data_set( - name=jdata_set_name, state="present", type="pds", replace=True - ) + hosts.all.shell(cmd=f"dtouch -tpds '{data_set_name}'") hosts.all.shell( - cmd=f"cp {temp_path}/SAMPLE \"//'{jdata_set_name}(SAMPLE)'\"" + cmd=f"cp {temp_path}/SAMPLE \"//'{data_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src=f"{jdata_set_name}(SAMPLE)", remote_src=True, wait_time=10 + 
src=f"{data_set_name}(SAMPLE)", remote_src=True, wait_time=10 ) for result in results.contacted.values(): assert result.get("changed") is True @@ -188,27 +186,25 @@ def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): finally: hosts.all.file(path=temp_path, state="absent") - hosts.all.zos_data_set(name=jdata_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{data_set_name}'") # test to show multi wildcard in Job_name query won't crash the search def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module - ndata_set_name = get_tmp_ds_name() + data_set_name = get_tmp_ds_name() temp_path = get_random_file_name(dir=TEMP_PATH) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {temp_path}/SAMPLE" ) - hosts.all.zos_data_set( - name=ndata_set_name, state="present", type="pds", replace=True - ) + hosts.all.shell(cmd=f"dtouch -tpds '{data_set_name}'") hosts.all.shell( - cmd=f"cp {temp_path}/SAMPLE \"//'{ndata_set_name}(SAMPLE)'\"" + cmd=f"cp {temp_path}/SAMPLE \"//'{data_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src=f"{ndata_set_name}(SAMPLE)", remote_src=True, wait_time=10 + src=f"{data_set_name}(SAMPLE)", remote_src=True, wait_time=10 ) for result in results.contacted.values(): assert result.get("changed") is True @@ -283,7 +279,7 @@ def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") - hosts.all.zos_data_set(name=ndata_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{data_set_name}'") def test_zos_job_id_query_short_ids_func(ansible_zos_module): diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index aaecb3c306..3e709a99ff 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ 
-275,10 +275,10 @@ def remove_uss_environment(ansible_zos_module, file): def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set(name=ds_name, type=ds_type) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + hosts.all.shell(cmd=f"decho '' '{ds_full_name}'") cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name @@ -289,7 +289,7 @@ def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) def remove_ds_environment(ansible_zos_module, ds_name): hosts = ansible_zos_module - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") # supported data set types ds_type = ['seq', 'pds', 'pdse'] @@ -966,7 +966,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): backup = get_tmp_ds_name(6, 6, symbols=True) try: # Set environment - result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + hosts.all.shell(cmd=f"dtouch -tseq '{ds_name}'") params["src"] = ds_name results = hosts.all.zos_lineinfile(**params) @@ -1139,7 +1139,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): try: ds_full_name = get_tmp_ds_name() temp_file = get_random_file_name(dir=TMP_DIRECTORY) - hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_full_name}'") hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " hosts.all.shell(cmd=cmd_str) @@ -1154,7 +1154,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): for key in kwargs: assert kwargs.get(key) in result.get(key) finally: - hosts.all.zos_data_set(name=ds_full_name, state="absent") + hosts.all.shell(cmd=f"drm 
'{ds_full_name}'") ## Non supported test cases @@ -1171,7 +1171,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): } try: ds_name = get_tmp_ds_name() + "." + ds_type - results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + results = hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") for result in results.contacted.values(): assert result.get("changed") is True params["path"] = ds_name @@ -1180,7 +1180,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): assert result.get("changed") is False assert result.get("msg") == "VSAM data set type is NOT supported" finally: - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") @pytest.mark.ds @@ -1205,22 +1205,12 @@ def test_ds_line_force(ansible_zos_module, dstype): params["path"] = f"{default_data_set_name}({member_2})" try: # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { "name": f"{default_data_set_name}({member_1})", - "type": "member", "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, - ] - ) - # write memeber to verify cases + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") + # write member to verify cases if ds_type in ["pds", "pdse"]: cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) else: @@ -1253,7 +1243,7 @@ def test_ds_line_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, 
state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}*'") @pytest.mark.ds @@ -1275,21 +1265,11 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { "name": f"{default_data_set_name}({member_1})", - "type": "member", "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, - ] - ) + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) hosts.all.shell(cmd=cmd_str) results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) @@ -1316,7 +1296,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}*'") @pytest.mark.ds @@ -1411,10 +1391,10 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): try: hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") - hosts.all.zos_data_set(name=ds_name, type=ds_type) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + hosts.all.shell(cmd=f"decho '' '{ds_full_name}'") cmd_str = f"cp -CM 
{quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 3cdf502e1e..a47dba912b 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -209,14 +209,7 @@ def test_basic_mount_with_bpx_nomarker_nobackup(ansible_zos_module, volumes_on_s dest = get_tmp_ds_name() dest_path = dest + "(AUTO1)" - hosts.all.zos_data_set( - name=dest, - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"dtouch -tpdse -s5M -IFBA -l80 {dest}") print("\nbnn-Copying {0} to {1}\n".format(tmp_file_filename, dest_path)) hosts.all.zos_copy( src=tmp_file_filename, @@ -252,15 +245,7 @@ def test_basic_mount_with_bpx_nomarker_nobackup(ansible_zos_module, volumes_on_s ) hosts.all.file(path=tmp_file_filename, state="absent") hosts.all.file(path="/pythonx/", state="absent") - hosts.all.zos_data_set( - name=dest, - state="absent", - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"drm {dest}") def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module @@ -365,14 +350,7 @@ def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_syste dest_path = dest + "(AUTO2)" back_dest_path = dest + "(AUTO2BAK)" - hosts.all.zos_data_set( - name=dest, - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"dtouch -tpdse -s5M -IFBA -l80 {dest}") print("\nbcb-Copying {0} to {1}\n".format(tmp_file_filename, dest_path)) hosts.all.zos_copy( @@ -438,15 +416,7 @@ def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_syste hosts.all.file(path=tmp_file_filename, state="absent") hosts.all.file(path=test_tmp_file_filename, state="absent") 
hosts.all.file(path="/pythonx/", state="absent") - hosts.all.zos_data_set( - name=dest, - state="absent", - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"drm {dest}") def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module @@ -464,7 +434,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) finally: tmphlq = "TMPHLQ" persist_data_set = get_tmp_ds_name() - hosts.all.zos_data_set(name=persist_data_set, state="present", type="seq") + hosts.all.shell(cmd=f"dtouch -tseq {persist_data_set}") unmount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", @@ -479,7 +449,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) stdin="", ) - hosts.all.zos_data_set(name=persist_data_set, state="absent") + hosts.all.shell(cmd=f"drm {persist_data_set}") for result in unmount_result.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" diff --git a/tests/functional/modules/test_zos_volume_init_func.py b/tests/functional/modules/test_zos_volume_init_func.py index 8f7f58bfe4..f5620d351c 100644 --- a/tests/functional/modules/test_zos_volume_init_func.py +++ b/tests/functional/modules/test_zos_volume_init_func.py @@ -490,7 +490,7 @@ def test_bad_param_volid_value_too_long(ansible_zos_module, volumes_unit_on_syst hosts.all.zos_operator(cmd=f"vary {address},online") -# Note - volume needs to be sms managed for zos_data_set to work. Possible +# Note - volume needs to be sms managed for data set creation to work. 
Possible # points of failure are: # unable to init volume first time around # unable to allocate data set @@ -530,7 +530,7 @@ def test_no_existing_data_sets_check(ansible_zos_module, volumes_unit_on_systems hosts.all.zos_operator(cmd=f"vary {address},online") # allocate data set to volume - hosts.all.zos_data_set(name=dataset, type='pds', volumes=volume) + hosts.all.shell(cmd=f"dtouch -tpds -V{volume} '{dataset}'") # take volume back offline hosts.all.zos_operator(cmd=f"vary {address},offline") @@ -550,7 +550,7 @@ def test_no_existing_data_sets_check(ansible_zos_module, volumes_unit_on_systems hosts.all.zos_operator(cmd=f"vary {address},online") # remove data set - hosts.all.zos_data_set(name=dataset, state='absent') + hosts.all.shell(cmd=f"drm '{dataset}'") # Note - technically verify_offline is not REQUIRED but it defaults to True From fd063b706d09313668a1647e8ce55061074a701d Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Sat, 30 Aug 2025 22:19:08 +0530 Subject: [PATCH 25/73] update parameters --- plugins/modules/zos_started_task.py | 36 +++++++++++++++++-- .../modules/test_zos_started_task_func.py | 21 ++++++----- 2 files changed, 46 insertions(+), 11 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 9b1616b8c8..21bafd7470 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -130,6 +130,14 @@ required: false type: bool default: false + wait_time_s: + required: false + default: 5 + type: int + description: + - Option I(wait_time_s) is the total time that module + L(zos_started_tak,./zos_started_task.html) will wait for a submitted task. The time begins when the module is executed + on the managed node. """ EXAMPLES = r""" - name: Start a started task using member name. 
@@ -228,7 +236,7 @@ def prepare_start_command(member, identifier, job_name, job_account, device, vol def extract_keys(stdout): - # keys = {'A': 'ASID', 'CT': 'CPU_Time', 'ET': 'Elapsed_Time', 'WUID': 'WUID', 'USERID': 'USERID', 'P': 'Priority'} + keys = {'A': 'ASID', 'CT': 'CPU_Time', 'ET': 'Elapsed_Time', 'WUID': 'WUID', 'USERID': 'USERID', 'P': 'Priority'} # params = {} # for key in keys: # parm = re.search(rf"{key}=([^\s]+)", stdout) @@ -252,10 +260,14 @@ def extract_keys(stdout): } for match in kv_pattern.finditer(line): key, value = match.groups() + if key in keys: + key = keys[key] current_task["DETAILS"][key] = value elif current_task: for match in kv_pattern.finditer(line): key, value = match.groups() + if key in keys: + key = keys[key] current_task["DETAILS"][key] = value if current_task: tasks.append(current_task) @@ -345,6 +357,11 @@ def run_module(): 'verbose': { 'type': 'bool', 'required': False + }, + 'wait_time_s': { + 'type': 'int', + 'required': False, + 'default': 5 } }, mutually_exclusive=[ @@ -412,6 +429,10 @@ def run_module(): 'verbose': { 'arg_type': 'bool', 'required': False + }, + 'wait_time_s': { + 'arg_type': 'int', + 'required': False } } @@ -437,10 +458,21 @@ def run_module(): subsystem_name = module.params.get('subsystem_name') reus_asid = module.params.get('reus_asid') keyword_parameters = module.params.get('keyword_parameters') + wait_time_s = module.params.get('wait_time_s') verbose = module.params.get('verbose') keyword_parameters_string = None if keyword_parameters is not None: - keyword_parameters_string = ','.join(f"{key}={value}" for key, value in keyword_parameters.items()) + # keyword_parameters_string = ','.join(f"{key}={value}" for key, value in keyword_parameters.items()) + for key, value in keyword_parameters.items(): + key_len = len(key) + value_len = len(value) + if key_len > 44 or value_len > 44 or key_len + value_len > 65: + module.fail_json( + msg="The length of a keyword=option is exceeding 66 characters or length of 
an individual value is exceeding 44 characters. key:{0}, value:{1}".format(key, value), + changed=False + ) + else: + keyword_parameters_string = ','.join(f"{key}={value}") device = device_type if device_type is not None else device_number kwargs = {} start_errmsg = ['ERROR'] diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 486ca72a0e..f0f1785bf4 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -29,13 +29,15 @@ //STDPARM DD * SH sleep 600 /*""" -PROC_JCL_CONTENT="""//TEST PROC TIME=6 -//STEP1 EXEC PGM=BPXBATCH +PROC_JCL_CONTENT="""//TESTERS PROC +//TEST JOB MSGCLASS=A,NOTIFY=&SYSUID +//STEP1 EXEC PGM=BPXBATCH,PARM='SH' //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* //STDPARM DD *,SYMBOLS=EXECSYS -SH sleep &TIME -/*""" +SH sleep 60 +/* +//PEND""" def test_start_task_with_invalid_member(ansible_zos_module): hosts = ansible_zos_module @@ -277,7 +279,7 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): hosts.all.file(path=temp_path, state="directory") hosts.all.shell( - cmd="echo {0} > {1}/SAMPLE".format(quote(PROC_JCL_CONTENT), temp_path) + cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) ) hosts.all.shell( @@ -285,13 +287,14 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): ) hosts.all.shell( - cmd="dcp {0} \"//'{1}(SAMPLE)'\"".format(data_set_name, PROC_PDS) + cmd="dcp {0} \"//'{1}(SAMPLE2)'\"".format(data_set_name, PROC_PDS) ) start_results = hosts.all.zos_started_task( state="started", - member="SAMPLE", - job_name="SPROC" + member="SAMPLE2", + job_name="SPROC", + verbose=True ) for result in start_results.contacted.values(): @@ -317,5 +320,5 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): cmd="drm {0}".format(data_set_name) ) hosts.all.shell( - cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + 
cmd="mrm '{0}(SAMPLE2)'".format(PROC_PDS) ) \ No newline at end of file From f18e6d9843998418a78b4bbf96b1cb6923fcdc60 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Mon, 1 Sep 2025 11:36:00 -0600 Subject: [PATCH 26/73] [Enhancement][zos_archive][2.0] Update module interface (#2256) * Refactor name to type * Updated tests * Corrected some dls commands so that it only fetches for the correct data set * Enhanced last test * Update test_zos_archive_func.py * Fixed test * Modified spack keywrod * Replace use_adrdssu keyword * Documented dest * Updated docs * Updated options * Updated aliases * Updated format options aliaases * Updated argument spec with format options alias * Added changelogs * Updated aliases * Removed arg parser validation not needed * Fixed zos_copy changing force to replace * Fixed failing zos_find test case * Updated zos_unarchive test to work with zos_archive new interface and remove zos_data_set deps * Updated tests * Updated tests * Updated changelog --- .../2256-zos_archive-interface-update.yml | 13 + plugins/module_utils/better_arg_parser.py | 6 - plugins/modules/zos_apf.py | 2 +- plugins/modules/zos_archive.py | 171 +++---- .../modules/test_zos_archive_func.py | 158 +++---- .../functional/modules/test_zos_find_func.py | 7 +- .../modules/test_zos_unarchive_func.py | 424 ++++++++---------- 7 files changed, 386 insertions(+), 395 deletions(-) create mode 100644 changelogs/fragments/2256-zos_archive-interface-update.yml diff --git a/changelogs/fragments/2256-zos_archive-interface-update.yml b/changelogs/fragments/2256-zos_archive-interface-update.yml new file mode 100644 index 0000000000..7a0d576d6d --- /dev/null +++ b/changelogs/fragments/2256-zos_archive-interface-update.yml @@ -0,0 +1,13 @@ +breaking_changes: + - zos_lineinfile - Option ``format.format_options`` is deprecated in favor of ``format.options``. + Option ``format.format_options.name`` is deprecated in favor of ``format.options.type``. 
+ Option ``format.format_options.use_adrdssu`` is deprecated in favor of ``format.options.use_adrdssu``. + Option ``format.format_options.terse_pack`` is deprecated in favor of ``format.options.spack`` as a new boolean option. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). + +trivial: + - test_zos_find_func.py - modified test case test_find_gdg_and_nonvsam_data_sets condition to make it less fragile in case there + are other data sets with the same HLQ in the system that are not part of the test. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). + - zos_apf - Modified alias deprecation version from 2.0 to 3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). \ No newline at end of file diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index e5dd8e975c..2cca37a316 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -1108,12 +1108,6 @@ def _add_alias(self, arg_name, arg_aliases=None, aliases=None): aliases = {} arg_aliases.append(arg_name) for alternate_name in arg_aliases: - if aliases.get(alternate_name, arg_name) != arg_name: - raise ValueError( - 'Conflicting aliases "{0}" and "{1}" found for name "{2}"'.format( - aliases.get(alternate_name), alternate_name, arg_name - ) - ) aliases[alternate_name] = arg_name return aliases diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 5f8ca037dd..b6faf31610 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -454,7 +454,7 @@ def main(): deprecated_aliases=[ dict( name='data_set_name', - version='2.0.0', + version='3.0.0', collection_name='ibm.ibm_zos_core') ], ), diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index a02ec00a59..158250ad4a 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -51,7 +51,7 @@ type: dict required: false suboptions: - name: + type: 
description: - The compression format to use. type: str @@ -65,27 +65,27 @@ - terse - xmit - pax - format_options: + aliases: [ name ] + options: description: - Options specific to a compression format. type: dict required: false + aliases: [ format_options ] suboptions: - terse_pack: + spack: description: - Compression option for use with the terse format, - I(name=terse). + I(type=terse). - Pack will compress records in a data set so that the output results in lossless data compression. - Spack will compress records in a data set so the output results in complex data compression. - Spack will produce smaller output and take approximately 3 times longer than pack compression. - type: str + type: bool required: false - choices: - - pack - - spack + default: true xmit_log_data_set: description: - Provide the name of a data set to store xmit log output. @@ -97,7 +97,7 @@ - When providing the I(xmit_log_data_set) name, ensure there is adequate space. type: str - use_adrdssu: + adrdssu: description: - If set to true, the C(zos_archive) module will use Data Facility Storage Management Subsystem data set services @@ -105,6 +105,7 @@ portable format before using C(xmit) or C(terse). type: bool default: false + aliases: [ use_adrdssu ] dest: description: - The remote absolute path or data set where the archive should be @@ -358,7 +359,7 @@ retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - - When packing and using C(use_adrdssu) flag the module will take up to two + - When packing and using C(adrdssu) flag the module will take up to two times the space indicated in C(dest_data_set). 
- tar, zip, bz2 and pax are archived using python C(tarfile) library which uses the latest version available for each format, for compatibility when @@ -378,7 +379,7 @@ src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: - name: tar + type: tar # Archive multiple files - name: Archive list of files into a zip @@ -388,7 +389,7 @@ - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip format: - name: zip + type: zip # Archive one data set into terse - name: Archive data set into a terse @@ -396,7 +397,7 @@ src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse # Use terse with different options - name: Archive data set into a terse, specify pack algorithm and use adrdssu @@ -404,10 +405,10 @@ src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - terse_pack: "spack" - use_adrdssu: true + type: terse + options: + spack: true + adrdssu: true # Use a pattern to store - name: Archive data set pattern using xmit @@ -416,7 +417,7 @@ exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit + type: xmit - name: Archive multiple GDSs into a terse zos_archive: @@ -426,25 +427,25 @@ - "USER.GDG(-2)" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Archive multiple data sets into a new GDS zos_archive: src: "USER.ARCHIVE.*" dest: "USER.GDG(+1)" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Encode the source data set into Latin-1 before archiving into a terse data set zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -456,9 +457,9 @@ - "USER.ARCHIVE2.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + 
adrdssu: true encoding: from: IBM-1047 to: ISO8859-1 @@ -467,6 +468,12 @@ ''' RETURN = r''' +dest: + description: + - The remote absolute path or data set where the archive was + created. + type: str + returned: always state: description: - The state of the input C(src). @@ -574,7 +581,7 @@ def get_archive_handler(module): The archive format for the module. """ - format = module.params.get("format").get("name") + format = module.params.get("format").get("type") if format in ["tar", "gz", "bz2", "pax"]: return TarArchive(module) elif format == "terse": @@ -705,7 +712,7 @@ def __init__(self, module): """ self.module = module self.dest = module.params['dest'] - self.format = module.params.get("format").get("name") + self.format = module.params.get("format").get("type") self.remove = module.params['remove'] self.changed = False self.errors = [] @@ -1179,7 +1186,7 @@ def __init__(self, module): ---------- original_checksums : str The SHA256 hash of the contents of input file. - use_adrdssu : bool + adrdssu : bool Whether to use Data Facility Storage Management Subsystem data set services program ADRDSSU to uncompress data sets or not. 
expanded_sources : list[str] @@ -1198,7 +1205,7 @@ def __init__(self, module): super(MVSArchive, self).__init__(module) self.tmphlq = module.params.get("tmp_hlq") self.original_checksums = self.dest_checksums() - self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") + self.adrdssu = module.params.get("format").get("options").get("adrdssu") self.expanded_sources = self.expand_mvs_paths(self.sources) self.expanded_exclude_sources = self.expand_mvs_paths(module.params['exclude']) self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) @@ -1339,18 +1346,6 @@ def create_dest_ds(self, name): """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) - # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) - # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) - # rc, out, err = self.module.run_command(cmd) - - # if not changed: - # self.module.fail_json( - # msg="Failed preparing {0} to be used as an archive".format(name), - # stdout=out, - # stderr=err, - # stdout_lines=cmd, - # rc=rc, - # ) return name def dump_into_temp_ds(self, temp_ds): @@ -1619,13 +1614,10 @@ def __init__(self, module): Compression option for use with the terse format. """ super(AMATerseArchive, self).__init__(module) - self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") + spack = module.params.get("format").get("options").get("spack") # We store pack_ard in uppercase because the AMATerse command requires # it in uppercase. - if self.pack_arg is None: - self.pack_arg = "SPACK" - else: - self.pack_arg = self.pack_arg.upper() + self.pack_arg = "SPACK" if spack else "PACK" def add(self, src, archive): """Archive src into archive using AMATERSE program. 
@@ -1665,9 +1657,9 @@ def archive_targets(self): Raises ------ fail_json - To archive multiple source data sets, you must use option 'use_adrdssu=True'. + To archive multiple source data sets, you must use option 'adrdssu=True'. """ - if self.use_adrdssu: + if self.adrdssu: source, changed = self._create_dest_data_set( type="seq", record_format="u", @@ -1682,7 +1674,7 @@ def archive_targets(self): # If we don't use a adrdssu container we cannot pack multiple data sets if len(self.targets) > 1: self.module.fail_json( - msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + msg="To archive multiple source data sets, you must use option 'adrdssu=True'.") source = self.targets[0] dataset = data_set.MVSDataSet( name=self.dest, @@ -1714,7 +1706,7 @@ def __init__(self, module): The name of the data set to store xmit log output. """ super(XMITArchive, self).__init__(module) - self.xmit_log_data_set = module.params.get("format").get("format_options").get("xmit_log_data_set") + self.xmit_log_data_set = module.params.get("format").get("options").get("xmit_log_data_set") def add(self, src, archive): """Archive src into archive using TSO XMIT. @@ -1759,9 +1751,9 @@ def archive_targets(self): Raises ------ fail_json - To archive multiple source data sets, you must use option 'use_adrdssu=True'. + To archive multiple source data sets, you must use option 'adrdssu=True'. 
""" - if self.use_adrdssu: + if self.adrdssu: source, changed = self._create_dest_data_set( type="seq", record_format="u", @@ -1776,7 +1768,7 @@ def archive_targets(self): # If we don't use a adrdssu container we cannot pack multiple data sets if len(self.sources) > 1: self.module.fail_json( - msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + msg="To archive multiple source data sets, you must use option 'adrdssu=True'.") source = self.sources[0] # dest = self.create_dest_ds(self.dest) dataset = data_set.MVSDataSet( @@ -1871,25 +1863,49 @@ def run_module(): format=dict( type='dict', options=dict( - name=dict( + type=dict( type='str', default='gz', - choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'], + aliases=['name'], + deprecated_aliases=[ + dict( + name='name', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], ), - format_options=dict( + options=dict( type='dict', required=False, + aliases=['format_options'], + deprecated_aliases=[ + dict( + name='format_options', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], options=dict( - terse_pack=dict( - type='str', - choices=['pack', 'spack'], + spack=dict( + type='bool', + default=True, ), xmit_log_data_set=dict( type='str', ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, + aliases=['use_adrdssu'], + deprecated_aliases=[ + dict( + name='use_adrdssu', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], ) ), ), @@ -1966,41 +1982,44 @@ def run_module(): format=dict( type='dict', options=dict( - name=dict( + type=dict( type='str', default='gz', - choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'], + aliases=['name'], ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( - terse_pack=dict( - type='str', + spack=dict( + type='bool', required=False, - 
choices=['pack', 'spack'], + default=True, ), xmit_log_data_set=dict( type='str', required=False, ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, + aliases=['use_adrdssu'], ) ), default=dict( - terse_pack="spack", + spack=True, xmit_log_data_set="", - use_adrdssu=False), + adrdssu=False), + aliases=['format_options'], ), ), default=dict( - name="", - format_options=dict( - terse_pack="spack", + type="", + options=dict( + spack=True, xmit_log_data_set="", - use_adrdssu=False + adrdssu=False ) ), ), diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index a48e79dff8..83dff1026a 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -131,7 +131,7 @@ def test_uss_single_archive(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -162,7 +162,7 @@ def test_uss_single_archive_with_mode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, mode=dest_mode ) @@ -191,7 +191,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -203,7 +203,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -216,7 +216,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, force=True, ) @@ -257,7 +257,7 @@ def test_uss_archive_multiple_files(ansible_zos_module, ds_format, path): src=path.get("files"), dest=dest, format={ - "name":ds_format + "type":ds_format }, ) @@ -302,7 +302,7 @@ def 
test_uss_archive_multiple_files_with_exclude(ansible_zos_module, ds_format, src=path.get("files"), dest=dest, format={ - "name":ds_format + "type":ds_format }, exclude=path.get("exclude") ) @@ -337,7 +337,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, ds_format): src=paths, dest=dest, format={ - "name":ds_format + "type":ds_format }, remove=True ) @@ -367,7 +367,7 @@ def test_uss_archive_encode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, encoding={ "from": TO_ENCODING, @@ -401,7 +401,7 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, encoding={ "from": FROM_ENCODING, @@ -432,10 +432,10 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format): # List of tests: # - test_mvs_archive_single_dataset -# - test_mvs_archive_single_dataset_use_adrdssu +# - test_mvs_archive_single_dataset_adrdssu # - test_mvs_archive_single_data_set_remove_target # - test_mvs_archive_multiple_data_sets -# - test_mvs_archive_multiple_data_sets_use_adrdssu +# - test_mvs_archive_multiple_data_sets_adrdssu # - test_mvs_archive_multiple_data_sets_remove_target # - test_mvs_archive_multiple_data_sets_with_exclusion # - test_mvs_archive_multiple_data_sets_with_missing @@ -519,11 +519,11 @@ def test_mvs_archive_single_dataset( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -536,7 +536,7 @@ def test_mvs_archive_single_dataset( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + 
cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -571,7 +571,7 @@ def test_mvs_archive_single_dataset( @pytest.mark.parametrize( "record_format", ["fb", "vb"], ) -def test_mvs_archive_single_dataset_use_adrdssu( +def test_mvs_archive_single_dataset_adrdssu( ansible_zos_module, ds_format, data_set, @@ -617,13 +617,13 @@ def test_mvs_archive_single_dataset_use_adrdssu( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -635,7 +635,7 @@ def test_mvs_archive_single_dataset_use_adrdssu( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -699,11 +699,11 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -717,11 +717,17 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result 
= hosts.all.shell(cmd = f"dls {hlq}.*") + # Changed to using the exact data set name in dls + # because using wildcards would fail. + # Assert archive data set is in place + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") - assert src_data_set != c_result.get("stdout") + # Assert src_data_set is removed + cmd_result = hosts.all.shell(cmd = f"dls {src_data_set}") + for c_result in cmd_result.contacted.values(): + assert f"BGYSC1103E No datasets match pattern: {src_data_set}." in c_result.get("stderr") finally: hosts.all.zos_data_set(name=src_data_set, state="absent") hosts.all.zos_data_set(name=archive_data_set, state="absent") @@ -773,12 +779,12 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set) hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", dest=archive_data_set, @@ -791,7 +797,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set) assert result.get("dest") == archive_data_set for ds in target_ds_list: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -846,12 +852,12 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - 
"format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) exclude = f"{src_data_set}1" archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", @@ -869,8 +875,8 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo assert exclude not in result.get("archived") else: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -924,12 +930,12 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, ds_format hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", dest=archive_data_set, @@ -1004,12 +1010,12 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form path_list = [ds.get("name") for ds in target_ds_list] format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, dest=archive_data_set, @@ -1027,8 +1033,8 @@ def 
test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form assert ds.get("name") not in result.get("archived") else: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1092,11 +1098,11 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -1122,8 +1128,8 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1165,10 +1171,10 @@ def test_gdg_archive(ansible_zos_module, dstype, format): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - format_dict = dict(name=format, format_options=dict()) + format_dict = dict(type=format, options=dict()) if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], dest=archive_data_set, @@ -1179,7 +1185,7 @@ def 
test_gdg_archive(ansible_zos_module, dstype, format): assert result.get("dest") == archive_data_set assert f"{data_set_name}.G0001V00" in result.get("archived") assert f"{data_set_name}.G0002V00" in result.get("archived") - cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(hlq)) + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}' ") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -1208,10 +1214,10 @@ def test_archive_into_gds(ansible_zos_module, dstype, format): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - format_dict = dict(name=format, format_options=dict()) + format_dict = dict(type=format, options=dict()) if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=data_set_name, dest=f"{archive_data_set}(+1)", @@ -1220,9 +1226,9 @@ def test_archive_into_gds(ansible_zos_module, dstype, format): for result in archive_result.contacted.values(): assert result.get("changed") is True assert data_set_name in result.get("archived") - cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(hlq)) + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}*' ") for c_result in cmd_result.contacted.values(): - assert archive_data_set in c_result.get("stdout") + assert f"{archive_data_set}.G0001V00" in c_result.get("stdout") finally: hosts.all.shell(cmd=f"drm {hlq}.*") @@ -1289,11 +1295,11 @@ def test_mvs_archive_single_dataset_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = 
hosts.all.zos_archive( src=src_data_set, @@ -1306,7 +1312,7 @@ def test_mvs_archive_single_dataset_encoding( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -1364,9 +1370,9 @@ def test_mvs_archive_multiple_dataset_pattern_encoding(ansible_zos_module, ds_fo ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} + format_dict["options"] = {"spack": True} for ds_name in matched_datasets: archive_data_set = get_tmp_ds_name() archive_result = hosts.all.zos_archive( @@ -1438,14 +1444,14 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding(ansible_zos ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} - #skipping some files to encode + format_dict["options"] = {"spack": True} + #skipping some files to encode skip_encoding_list = [matched_datasets[0]] current_encoding_config = encoding.copy() current_encoding_config["skip_encoding"] = skip_encoding_list - + for ds_name in matched_datasets: archive_data_set = get_tmp_ds_name() archive_result = hosts.all.zos_archive( @@ -1511,16 +1517,16 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib type="member", state="present" ) - + test_line = "pattern match" for ds_name in all_datasets_to_process: for member in data_set.get("members"): ds_target = 
f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} + format_dict["options"] = {"spack": True} for ds_name in matched_datasets: original_hex_result = hosts.all.shell(cmd=f"dcat '{ds_name}' | od -x") @@ -1548,7 +1554,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib original_hex.append('*') else: parts = line.split() - if len(parts) > 1: + if len(parts) > 1: original_hex.extend(parts[1:]) reverted_hex = [] @@ -1564,13 +1570,13 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib try: original_first_star_idx = original_hex.index('*') except ValueError: - original_first_star_idx = len(original_hex) + original_first_star_idx = len(original_hex) try: reverted_first_star_idx = reverted_hex.index('*') except ValueError: reverted_first_star_idx = len(reverted_hex) - + original_hex_to_compare = original_hex[:original_first_star_idx] reverted_hex_to_compare = reverted_hex[:reverted_first_star_idx] @@ -1589,4 +1595,4 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib for ds_name in matched_datasets: hosts.all.zos_data_set(name=ds_name, state="absent") for archive_ds in archived_datasets: - hosts.all.zos_data_set(name=archive_ds, state="absent") \ No newline at end of file + hosts.all.zos_data_set(name=archive_ds, state="absent") diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index c773223ced..ac2fd27ab8 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -689,10 +689,13 @@ def test_find_gdg_and_nonvsam_data_sets(ansible_zos_module): patterns=[f'{TEST_SUITE_HLQ}.*.*'], resource_type=["gdg", "nonvsam"], ) + data_sets = [{"name":data_set_name, "type": 
"NONVSAM"} for data_set_name in SEQ_NAMES] + data_sets.append({"name":gdg_b, "type": "GDG"}) for val in find_res.contacted.values(): assert val.get('msg') is None - assert len(val.get('data_sets')) == 4 - assert {"name":gdg_b, "type": "GDG"} in val.get('data_sets') + assert len(val.get('data_sets')) >= 4 + for data_set in data_sets: + assert data_set in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) finally: # Remove GDG. diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 5b4aff3dfd..c9ec479091 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -98,17 +98,11 @@ def create_multiple_data_sets(ansible_zos_module, base_name, n, ds_type, ): for i in range(n): curr_ds = { "name":base_name+str(i), - "type":ds_type, - "state":"present", - "replace":True, - "force":True } + ansible_zos_module.all.shell(cmd=f"dtouch -t{ds_type} '{base_name+str(i)}'") test_data_sets.append(curr_ds) # Create data sets in batch - ansible_zos_module.all.zos_data_set( - batch=test_data_sets - ) return test_data_sets @@ -117,15 +111,9 @@ def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): for i in range(n): curr_ds = { "name":f"{pds_name}({member_base_name}{i})", - "type":"member", - "state":"present", - "replace":True, - "force":True } + ansible_zos_module.all.shell(cmd=f"decho '' '{pds_name}({member_base_name}{i})'") test_members.append(curr_ds) - ansible_zos_module.all.zos_data_set( - batch=test_members - ) return test_members @@ -159,7 +147,7 @@ def test_uss_unarchive(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -198,7 +186,7 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) uss_files 
= [file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] @@ -242,7 +230,7 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -285,7 +273,7 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -322,7 +310,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) for file in list(USS_TEST_FILES.keys()): @@ -406,7 +394,7 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -448,7 +436,7 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -543,26 +531,13 @@ def test_mvs_unarchive_single_data_set( mvs_dest_archive = get_tmp_ds_name() dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and 
VB due to RDW if record_format in ["v", "vb"]: @@ -577,11 +552,11 @@ def test_mvs_unarchive_single_data_set( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type": ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack": True } archive_result = hosts.all.zos_archive( src=dataset, @@ -603,14 +578,15 @@ def test_mvs_unarchive_single_data_set( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, @@ -634,8 +610,8 @@ def test_mvs_unarchive_single_data_set( for result in cat_result.contacted.values(): assert result.get("stdout") == test_line finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") + hosts.all.shell(cmd=f"drm '{mvs_dest_archive}'") @pytest.mark.ds @pytest.mark.parametrize( @@ -677,26 +653,13 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( mvs_dest_archive = get_tmp_ds_name() dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", 
"pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -711,13 +674,13 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } format_dict["format_options"] = { - "use_adrdssu":True + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["format_options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, dest=mvs_dest_archive, @@ -732,14 +695,18 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True ) @@ -753,8 +720,8 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") + hosts.all.shell(cmd=f"drm '{mvs_dest_archive}'") @pytest.mark.ds @pytest.mark.parametrize( @@ -803,12 +770,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form 
hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -818,12 +785,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form dataset = dataset.replace("$", "/$") hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=True ) @@ -839,8 +810,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f""" drm '{dataset}*' """) + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -893,12 +864,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) 
archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -911,13 +882,17 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( # remote data_sets from host hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action include_ds = f"{dataset}0" unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, include=[include_ds], ) @@ -938,8 +913,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( assert target_ds.get("name") not in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -992,12 +967,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1007,13 +982,17 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( # remote data_sets from host hosts.all.shell(cmd=f""" drm "{dataset}*" """) - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # 
Unarchive action exclude_ds = f"{dataset}0" unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, exclude=[exclude_ds], ) @@ -1033,8 +1012,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1083,12 +1062,12 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1098,12 +1077,16 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat # remote data_sets from host hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, list=True ) @@ -1119,8 +1102,8 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - 
hosts.all.shell(cmd=f"""drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1184,24 +1167,28 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, format=format_dict, ) - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=force ) @@ -1221,8 +1208,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( assert result.get("changed") is False assert result.get("failed", False) is True finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1265,25 +1252,13 @@ def test_mvs_unarchive_single_data_set_remote_src( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") - # Clean env - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - 
type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present" - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -1298,13 +1273,13 @@ def test_mvs_unarchive_single_data_set_remote_src( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, dest=mvs_dest_archive, @@ -1318,7 +1293,7 @@ def test_mvs_unarchive_single_data_set_remote_src( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") # fetch archive data set into tmp folder fetch_result = hosts.all.zos_fetch( @@ -1330,12 +1305,16 @@ def test_mvs_unarchive_single_data_set_remote_src( for res in fetch_result.contacted.values(): source_path = res.get("dest") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=source_path, - format=format_dict, + format=unarchive_format_dict, remote_src=False, ) @@ -1355,8 +1334,8 
@@ def test_mvs_unarchive_single_data_set_remote_src( finally: - hosts.all.shell(cmd=f"drm {dataset}*") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") tmp_folder.cleanup() @@ -1378,9 +1357,6 @@ def test_mvs_unarchive_single_data_set_remote_src( }, ] ) -@pytest.mark.parametrize( - "record_length", [80] -) @pytest.mark.parametrize( "encoding", [ {"from": "IBM-1047", "to": "ISO8859-1"}, @@ -1390,7 +1366,6 @@ def test_mvs_unarchive_encoding( ansible_zos_module, ds_format, data_set, - record_length, encoding ): try: @@ -1399,26 +1374,14 @@ def test_mvs_unarchive_encoding( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" record_format = "fb" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + record_length = 80 # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") test_line = "a" * record_length for member in data_set.get("members"): if member == "": @@ -1428,11 +1391,11 @@ def test_mvs_unarchive_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=dataset, @@ -1454,20 +1417,21 @@ def test_mvs_unarchive_encoding( for c_result in 
cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}' ") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, "type":data_set.get("dstype"), "record_format":record_format, - "record_length":record_length + "record_length":record_length }, encoding=encoding, ) @@ -1481,8 +1445,8 @@ def test_mvs_unarchive_encoding( for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @@ -1524,26 +1488,13 @@ def test_mvs_unarchive_encoding_skip_encoding( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" record_format = "fb" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") test_line = "a" * record_length for member in data_set.get("members"): if member == "": @@ -1553,11 +1504,11 @@ def 
test_mvs_unarchive_encoding_skip_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=dataset, @@ -1579,12 +1530,13 @@ def test_mvs_unarchive_encoding_skip_encoding( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}' ") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + } - #skipping some files to encode + # skipping some files to encode skip_encoding_list = [dataset] current_encoding_config = encoding.copy() current_encoding_config["skip_encoding"] = skip_encoding_list @@ -1592,13 +1544,13 @@ def test_mvs_unarchive_encoding_skip_encoding( # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, "type":data_set.get("dstype"), "record_format":record_format, - "record_length":record_length + "record_length":record_length }, encoding=encoding, ) @@ -1612,8 +1564,8 @@ def test_mvs_unarchive_encoding_skip_encoding( for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @@ -1668,12 +1620,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = { - "name":ds_format, - 
"format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1683,12 +1635,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module dataset = dataset.replace("$", "/$") hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=True, encoding=encoding @@ -1705,8 +1661,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): @@ -1716,17 +1672,17 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): # False path source_path = "/tmp/OMVSADM.NULL" - format_dict = { + unarchive_format_dict = { "name":'terse' } - format_dict["format_options"] = { + unarchive_format_dict["format_options"] = { "use_adrdssu":True } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=source_path, - format=format_dict, + format=unarchive_format_dict, remote_src=False, ) @@ -1738,24 +1694,26 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): @pytest.mark.ds 
@pytest.mark.parametrize( - "format", [ + "ds_format", [ "terse", "xmit", ]) @pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) -def test_gdg_unarchive(ansible_zos_module, dstype, format): +def test_gdg_unarchive(ansible_zos_module, dstype, ds_format): try: HLQ = "ANSIBLE" hosts = ansible_zos_module data_set_name = get_tmp_ds_name(symbols=True) archive_data_set = get_tmp_ds_name(symbols=True) - results = hosts.all.zos_data_set( - batch = [ - { "name":data_set_name, "state":"present", "type":"gdg", "limit":3}, - { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, - { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, - ] - ) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -1778,12 +1736,10 @@ def test_gdg_unarchive(ansible_zos_module, dstype, format): for ds in ds_to_write: hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds)) - format_dict = dict(name=format, format_options=dict()) - if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) - if format == "terse": - del format_dict["format_options"]["terse_pack"] + format_dict = dict(name=ds_format, options=dict()) + if ds_format == "terse": + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], 
dest=archive_data_set, @@ -1797,16 +1753,16 @@ def test_gdg_unarchive(ansible_zos_module, dstype, format): cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") - - hosts.all.zos_data_set( - batch=[ - {"name": f"{data_set_name}(-1)", "state": "absent"}, - {"name": f"{data_set_name}(0)", "state": "absent"}, - ] - ) + hosts.all.shell(cmd=f"drm '{data_set_name}(-1)' && drm '{data_set_name}(0)'") + unarchive_format_dict = { + "name": ds_format, + "format_options": { + "use_adrdssu": True + } + } unarchive_result = hosts.all.zos_unarchive( src=archive_data_set, - format=format_dict, + format=unarchive_format_dict, remote_src=True ) for result in unarchive_result.contacted.values(): @@ -1846,7 +1802,7 @@ def test_zos_unarchive_async(ansible_zos_module, get_config): archive_result = hosts_zos.all.zos_archive(src=list(USS_TEST_FILES.keys()), dest=dest, format=dict( - name=archive_format + type=archive_format )) # remove files for file in USS_TEST_FILES.keys(): From 3a832ab6d5c32ee82106a5156d4e9a2f510c828e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 2 Sep 2025 11:52:09 -0500 Subject: [PATCH 27/73] [Enabler][2146]update_zos_find_interface (#2261) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Test * Revert "Test" This reverts commit aed8d8dcf550f90abf3d5d1c4cab97ca5bdebf28. 
* Migrate interface * Debug testing * Modify testing * Modify test suite * Add remove for pdspaths * Modify fails of validations * Complete test suite and new version * Add fragment * Fix sanity * Add functions for excludes specfici members or datasets * Fix sanity * Add validation * Add validation * Add validation * Fix variable set * Update plugins/modules/zos_find.py Co-authored-by: Fernando Flores * Fix find and documentation * Fix changelog --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores --- .../2261-update_zos_find_interface.yml | 4 + plugins/modules/zos_find.py | 177 ++++++++---------- .../functional/modules/test_zos_find_func.py | 76 +++++++- 3 files changed, 149 insertions(+), 108 deletions(-) create mode 100644 changelogs/fragments/2261-update_zos_find_interface.yml diff --git a/changelogs/fragments/2261-update_zos_find_interface.yml b/changelogs/fragments/2261-update_zos_find_interface.yml new file mode 100644 index 0000000000..777a0ac3b2 --- /dev/null +++ b/changelogs/fragments/2261-update_zos_find_interface.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_find - Option value ``pds_pattern`` is deprecated. Previously, excluding members required adding pds_patterns. + The updated behavior now allows excluding members by placing them inside parentheses. + (https://github.com/ansible-collections/ibm_zos_core/pull/2261). \ No newline at end of file diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index 68319b9171..a25785c450 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -66,6 +66,8 @@ Multiple patterns can be specified using a list. - The pattern can be a regular expression. - If the pattern is a regular expression, it must match the full data set name. + - To exclude members, the regular expression or pattern must be enclosed in parentheses. + This expression can be used alongside a pattern to exclude data set names. 
aliases: - exclude type: list @@ -78,7 +80,6 @@ names that match at least one of the patterns specified. Multiple patterns can be specified using a list. - This parameter expects a list, which can be either comma separated or YAML. - - If C(pds_patterns) is provided, C(patterns) must be member patterns. - When searching for members within a PDS/PDSE, pattern can be a regular expression. type: list elements: str @@ -92,17 +93,6 @@ - Filtering by size is currently only valid for sequential and partitioned data sets. required: false type: str - pds_patterns: - description: - - List of PDS/PDSE to search. Wildcard is possible. - - Required when searching for data set members. - - Valid only for C(nonvsam) resource types. Otherwise ignored. - aliases: - - pds_paths - - pds_pattern - type: list - elements: str - required: false resource_type: description: - The types of resources to search. @@ -218,6 +208,22 @@ EXAMPLES = r""" +- name: Exclude all members starting with characters 'TE' in a given list datasets patterns + zos_find: + excludes: '(^te.*)' + patterns: + - IMSTEST.TEST.* + - IMSTEST.USER.* + - USER.*.LIB + +- name: Exclude datasets that includes 'DATA' and members starting with characters 'MEM' in a given list datasets patterns + zos_find: + excludes: '^.*DATA.*(^MEM.*)' + patterns: + - IMSTEST.*.TEST + - IMSTEST.*.* + - USER.*.LIB + - name: Find all data sets with HLQ 'IMS.LIB' or 'IMSTEST.LIB' that contain the word 'hello' zos_find: patterns: @@ -238,14 +244,6 @@ contains: 'hello' excludes: '.*TEST' -- name: Find all members starting with characters 'TE' in a given list of PDS patterns - zos_find: - patterns: '^te.*' - pds_patterns: - - IMSTEST.TEST.* - - IMSTEST.USER.* - - USER.*.LIB - - name: Find all data sets greater than 2MB and allocated in one of the specified volumes zos_find: patterns: 'USER.*' @@ -344,7 +342,6 @@ import math import json -from copy import deepcopy from re import match as fullmatch @@ -418,7 +415,7 @@ def content_filter(module, 
patterns, content): return filtered_data_sets -def data_set_filter(module, pds_paths, patterns): +def data_set_filter(module, patterns): """ Find data sets that match any pattern in a list of patterns. Parameters @@ -442,7 +439,6 @@ def data_set_filter(module, pds_paths, patterns): Non-zero return code received while executing ZOAU shell command 'dls'. """ filtered_data_sets = dict(ps=set(), pds=dict(), searched=0) - patterns = pds_paths or patterns for pattern in patterns: rc, out, err = _dls_wrapper(pattern, list_details=True) if rc != 0: @@ -462,66 +458,40 @@ def data_set_filter(module, pds_paths, patterns): result = line.split() if result: if result[1] == "PO": - if pds_paths: - mls_rc, mls_out, mls_err = module.run_command( - "mls '{0}(*)'".format(result[0]), errors='replace' - ) - if mls_rc == 2: - filtered_data_sets["pds"][result[0]] = {} - else: - filtered_data_sets["pds"][result[0]] = \ - set(filter(None, mls_out.splitlines())) - else: + mls_rc, mls_out, mls_err = module.run_command( + f"mls '{result[0]}(*)'", errors='replace' + ) + if mls_rc == 2: filtered_data_sets["pds"][result[0]] = {} + else: + filtered_data_sets["pds"][result[0]] = \ + set(filter(None, mls_out.splitlines())) else: filtered_data_sets["ps"].add(result[0]) return filtered_data_sets -def pds_filter(module, pds_dict, member_patterns, excludes=None): +def filter_members(module, members, excludes): """ Return all PDS/PDSE data sets whose members match any of the patterns in the given list of member patterns. - Parameters ---------- module : AnsibleModule The Ansible module object being used in the module. - pds_dict : dict[str, str] - A dictionary where each key is the name of - of the PDS/PDSE and the value is a list of - members belonging to the PDS/PDSE. - member_patterns : list - A list of member patterns to search for. - + member : set + A list of member patterns to on it. + excludes : list + The str value to filter members. 
Returns ------- dict[str, set[str]] Filtered PDS/PDSE with corresponding members. """ - filtered_pds = dict() - for pds, members in pds_dict.items(): - for m in members: - for mem_pat in member_patterns: - if _match_regex(module, mem_pat, m): - try: - filtered_pds[pds].add(m) - except KeyError: - filtered_pds[pds] = set({m}) - # ************************************************************************ - # Exclude any member that matches a given pattern in 'excludes'. - # Changes will be made to 'filtered_pds' each iteration. Therefore, - # iteration should be performed over a copy of 'filtered_pds'. Because - # Python performs a shallow copy when copying a dictionary, a deep copy - # should be performed. - # ************************************************************************ - if excludes: - for pds, members in deepcopy(filtered_pds).items(): - for m in members: - for ex_pat in excludes: - if _match_regex(module, ex_pat, m): - filtered_pds[pds].remove(m) - break - return filtered_pds + filtered_members = { + member for member in members + if not any(_match_regex(module, exclude, member) for exclude in excludes) + } + return filtered_members def vsam_filter(module, patterns, vsam_types, age=None, excludes=None): @@ -817,7 +787,6 @@ def migrated_nonvsam_filter(module, data_sets, excludes): # Fetch only active datasets init_filtered_data_sets = data_set_filter( module, - None, [ds] ) active_datasets = \ @@ -906,6 +875,37 @@ def exclude_data_sets(module, data_set_list, excludes): return data_set_list +def get_members_to_exclude(excludes): + """Get from the excludes str any subject that is isndie () to get members to exclude + + Args + ---- + excludes : str + String of exlucions of the find operation including members + + Returns + ------- + members_to_exclude [list] + The patters of members to be exlude from the list + datasets_to_exclude [list] + The patters of datasets to be exlude from the list + """ + members_to_exclude = [] + datasets_to_exclude = [] + for 
exclude in excludes: + match = re.search(r'\(([^)]+)\)', exclude) + if match: + members = match.group(1) + datasets = exclude[:match.start()] + exclude[match.end():] + members_to_exclude.append(members) + if datasets: + datasets_to_exclude.append(datasets) + else: + if exclude: + datasets_to_exclude.append(exclude) + return members_to_exclude, datasets_to_exclude + + def _age_filter(ds_date, now, age): """Determine whether a given date is older than 'age'. @@ -1237,11 +1237,6 @@ def run_module(module): excludes = module.params.get('excludes') or module.params.get('exclude') patterns = module.params.get('patterns') size = module.params.get('size') - pds_paths = ( - module.params.get('pds_paths') - or module.params.get('pds_patterns') - or module.params.get('pds_pattern') - ) resource_type = module.params.get('resource_type') or module.params.get('resource_types') resource_type = [type.upper() for type in resource_type] volume = module.params.get('volume') or module.params.get('volumes') @@ -1260,6 +1255,9 @@ def run_module(module): filtered_migrated_types = set() vsam_migrated_types = set() + excludes_datasets = exclude_members = [] + if excludes: + exclude_members, excludes_datasets = get_members_to_exclude(excludes) for type in resource_type: if type in vsam_types: filtered_resource_types.add("VSAM") @@ -1294,7 +1292,7 @@ def run_module(module): for res_type in filtered_resource_types: examined = 0 filtered_data_sets = list() - init_filtered_data_sets = filtered_pds = dict() + init_filtered_data_sets = dict() if res_type == "MIGRATED": migrated_data_sets = list() for mtype in filtered_migrated_types: @@ -1307,25 +1305,18 @@ def run_module(module): if contains: init_filtered_data_sets = content_filter( module, - pds_paths if pds_paths else patterns, + patterns, contains ) else: init_filtered_data_sets = data_set_filter( module, - pds_paths, patterns ) - if pds_paths: - filtered_pds = pds_filter( - module, init_filtered_data_sets.get("pds"), patterns, 
excludes=excludes - ) - filtered_data_sets = list(filtered_pds.keys()) - else: - filtered_data_sets = \ - list(init_filtered_data_sets.get("ps").union(set(init_filtered_data_sets['pds'].keys()))) - if excludes: - filtered_data_sets = exclude_data_sets(module, filtered_data_sets, excludes) + filtered_data_sets = \ + list(init_filtered_data_sets.get("ps").union(set(init_filtered_data_sets['pds'].keys()))) + if len(excludes_datasets) > 0: + filtered_data_sets = exclude_data_sets(module, filtered_data_sets, excludes_datasets) # Filter data sets by age or size if size or age: filtered_data_sets = data_set_attribute_filter( @@ -1339,13 +1330,14 @@ def run_module(module): filtered_data_sets, examined = vsam_filter(module, patterns, vsam_resource_types, age=age, excludes=excludes) elif res_type == "GDG": filtered_data_sets = gdg_filter(module, patterns, limit, empty, fifo, purge, scratch, extended, excludes) - if filtered_data_sets: for ds in filtered_data_sets: if ds: if res_type == "NONVSAM": - members = filtered_pds.get(ds) or init_filtered_data_sets['pds'].get(ds) + members = init_filtered_data_sets['pds'].get(ds) if members: + if len(exclude_members) > 0: + members = filter_members(module, members, exclude_members) res_args['data_sets'].append( dict(name=ds, members=members, type=res_type) ) @@ -1389,12 +1381,6 @@ def main(): required=True ), size=dict(type="str", required=False), - pds_patterns=dict( - type="list", - elements="str", - required=False, - aliases=["pds_pattern", "pds_paths"] - ), resource_type=dict( type="list", elements="str", @@ -1438,11 +1424,6 @@ def main(): excludes=dict(arg_type="list", required=False, aliases=["exclude"]), patterns=dict(arg_type="list", required=True), size=dict(arg_type="str", required=False), - pds_patterns=dict( - arg_type="list", - required=False, - aliases=["pds_pattern", "pds_paths"] - ), resource_type=dict( arg_type="list", required=False, diff --git a/tests/functional/modules/test_zos_find_func.py 
b/tests/functional/modules/test_zos_find_func.py index ac2fd27ab8..1047c07e7d 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -102,6 +102,7 @@ def test_find_gdg_data_sets(ansible_zos_module): assert len(val.get('data_sets')) == 1 assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.*.*'], @@ -114,6 +115,7 @@ def test_find_gdg_data_sets(ansible_zos_module): assert len(val.get('data_sets')) == 1 assert {"name":gdg_b, "type": "GDG"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.*.*'], @@ -127,6 +129,7 @@ def test_find_gdg_data_sets(ansible_zos_module): assert len(val.get('data_sets')) == 1 assert {"name":gdg_c, "type": "GDG"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None finally: # Remove one by one to avoid using an HLQ.* cuz it could cause bugs when running in parallel. 
@@ -160,6 +163,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): for ds in val.get('data_sets'): assert ds.get('name') in SEQ_NAMES assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None finally: hosts.all.zos_data_set( batch=[ @@ -199,6 +203,8 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): for ds in val.get('data_sets'): assert ds.get('name') in SEQ_NAMES assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -241,15 +247,17 @@ def test_find_pds_members_containing_string(ansible_zos_module): result = hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], contains=search_string, - patterns=['.*'] + patterns=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'] ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): assert ds.get('name') in PDS_NAMES assert len(ds.get('members')) == 1 + assert val.get('matched') is not None + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -282,6 +290,9 @@ def test_exclude_data_sets_from_matched_list(ansible_zos_module): assert len(val.get('data_sets')) == 2 for ds in val.get('data_sets'): assert ds.get('name') in SEQ_NAMES + assert val.get('matched') is not None + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -322,14 +333,16 @@ def test_exclude_members_from_matched_list(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], - excludes=['.*FILE$'], - patterns=['.*'] + excludes=['(.*FILE$)'], + patterns=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'] ) for val in find_res.contacted.values(): 
assert len(val.get('data_sets')) == 3 for ds in val.get('data_sets'): assert len(ds.get('members')) == 1 + assert val.get('matched') is not None + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -350,6 +363,8 @@ def test_find_data_sets_older_than_age(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == 2 + assert val.get('examined') is not None + assert val.get('msg') is None @pytest.mark.parametrize("ds_type", DATASET_TYPES) @@ -364,6 +379,8 @@ def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == 2 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set(name=TEST_PS1, state="absent") hosts.all.zos_data_set(name=TEST_PS2, state="absent") @@ -378,6 +395,8 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") @@ -396,6 +415,8 @@ def test_find_data_sets_in_volume(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) >= 1 assert val.get('matched') >= 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -419,6 +440,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) assert val.get('data_sets')[0].get("name", None) == VSAM_NAMES[0] + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( 
patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -428,6 +451,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) assert val.get('data_sets')[0].get("name", None) == f"{VSAM_NAMES[0]}.DATA" + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -436,6 +461,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -445,6 +472,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) assert val.get('data_sets')[0].get("name", None) == f"{VSAM_NAMES[0]}.INDEX" + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -454,6 +483,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 3 assert val.get('matched') == len(val.get('data_sets')) assert val.get('examined') == 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -487,6 +518,8 @@ def test_find_vsam_pattern_disp_old(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.shell(cmd=f"drm '{jcl_ds}'") hosts.all.zos_data_set( @@ -516,6 +549,8 @@ def 
test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -533,6 +568,7 @@ def test_find_invalid_age_indicator_fails(ansible_zos_module): find_res = hosts.all.zos_find(patterns=['some.pattern'], age='3s') for val in find_res.contacted.values(): assert val.get('msg') is not None + assert val.get('changed') is False def test_find_invalid_size_indicator_fails(ansible_zos_module): @@ -540,6 +576,7 @@ def test_find_invalid_size_indicator_fails(ansible_zos_module): find_res = hosts.all.zos_find(patterns=['some.pattern'], size='5h') for val in find_res.contacted.values(): assert val.get('msg') is not None + assert val.get('changed') is False def test_find_non_existent_data_sets(ansible_zos_module): @@ -548,17 +585,20 @@ def test_find_non_existent_data_sets(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_non_existent_data_set_members(ansible_zos_module): hosts = ansible_zos_module find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.NONE.PDS.*'], - patterns=['.*'] + patterns=[f'{TEST_SUITE_HLQ}.NONE.PDS.*'], ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_mixed_members_from_pds_paths(ansible_zos_module): @@ -590,14 +630,15 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.NONE.PDS.*',f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], - excludes=['.*FILE$'], - patterns=['.*'] + excludes=['(.*FILE$)'], + 
patterns=[f'{TEST_SUITE_HLQ}.NONE.PDS.*',f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 for ds in val.get('data_sets'): assert len(ds.get('members')) == 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -631,6 +672,8 @@ def test_find_sequential_special_data_sets_containing_single_string(ansible_zos_ for ds in special_names: assert {"name":ds, "type": "NONVSAM"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: for ds in special_names: hosts.all.shell(cmd=f"drm '{ds}'") @@ -657,6 +700,8 @@ def test_find_vsam_and_gdg_data_sets(ansible_zos_module, volumes_on_systems): assert val.get('matched') == len(val.get('data_sets')) assert {"name":VSAM_NAMES[0], "type": "CLUSTER"} in val.get('data_sets') assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -697,6 +742,7 @@ def test_find_gdg_and_nonvsam_data_sets(ansible_zos_module): for data_set in data_sets: assert data_set in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None finally: # Remove GDG. hosts.all.shell(cmd=f"drm {gdg_b}") @@ -738,6 +784,8 @@ def test_find_vsam_and_nonvsam_data_sets(ansible_zos_module, volumes_on_systems) assert len(val.get('data_sets')) == 4 assert {"name":f'{VSAM_NAMES[0]}.DATA', "type": "DATA"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: # Remove VSAM. 
hosts.all.zos_data_set( @@ -769,6 +817,8 @@ def test_find_migrated_data_sets(ansible_zos_module): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): assert ds.get("type") == "MIGRATED" + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_migrated_data_sets_with_excludes(ansible_zos_module): @@ -782,6 +832,8 @@ def test_find_migrated_data_sets_with_excludes(ansible_zos_module): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): assert not re.fullmatch(r".*F4", ds.get("name")) + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_migrated_data_sets_with_migrated_type(ansible_zos_module): @@ -796,6 +848,8 @@ def test_find_migrated_data_sets_with_migrated_type(ansible_zos_module): for ds in val.get('data_sets'): assert ds.get("type") == "MIGRATED" assert ds.get("migrated_resource_type") == "NONVSAM" + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_migrated_and_gdg_data_sets(ansible_zos_module): @@ -815,6 +869,8 @@ def test_find_migrated_and_gdg_data_sets(ansible_zos_module): assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') for ds in val.get('data_sets'): assert ds.get("type") in ["MIGRATED", "GDG"] + assert val.get('examined') is not None + assert val.get('msg') is None finally: # Remove GDG. hosts.all.shell(cmd=f"drm {gdg_a}") From d7e6cf07135fe1b90a00657c7142c9a072d4d0ab Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Tue, 2 Sep 2025 10:52:50 -0600 Subject: [PATCH 28/73] [Enhancement][2.0][zos_data_set] Enhance error messages when not able to create GDGs (#2254) * cherry picked fix from 1.16. 
* Updated changelog * Fixed test zos_find * Added a proper quotes in dls to avoid shell resolving when using hlq.* * Avoid leaving temporary uss files behind --------- Co-authored-by: Mayank Mani --- .../2254-data_set-Enhance-error-message.yml | 3 ++ plugins/module_utils/data_set.py | 44 ++++++++++++++++++- plugins/modules/zos_blockinfile.py | 6 +-- .../modules/test_zos_archive_func.py | 24 +++++----- .../modules/test_zos_encode_func.py | 8 ++-- 5 files changed, 63 insertions(+), 22 deletions(-) create mode 100644 changelogs/fragments/2254-data_set-Enhance-error-message.yml diff --git a/changelogs/fragments/2254-data_set-Enhance-error-message.yml b/changelogs/fragments/2254-data_set-Enhance-error-message.yml new file mode 100644 index 0000000000..64b5d2c6a1 --- /dev/null +++ b/changelogs/fragments/2254-data_set-Enhance-error-message.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_data_set - Enhances error messages when creating a Generation Data Group fails. + (https://github.com/ansible-collections/ibm_zos_core/pull/2254) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 778fa7a290..2db045ee62 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -36,12 +36,14 @@ try: from zoautil_py import datasets, exceptions, gdgs, mvscmd, ztypes + from zoautil_py.exceptions import GenerationDataGroupCreateException except ImportError: datasets = ZOAUImportError(traceback.format_exc()) exceptions = ZOAUImportError(traceback.format_exc()) gdgs = ZOAUImportError(traceback.format_exc()) mvscmd = ZOAUImportError(traceback.format_exc()) ztypes = ZOAUImportError(traceback.format_exc()) + GenerationDataGroupCreateException = ZOAUImportError(traceback.format_exc()) class DataSet(object): @@ -2948,6 +2950,14 @@ def __init__( # Removed escaping since is not needed by the GDG python api. 
# self.name = DataSet.escape_data_set_name(self.name) + @staticmethod + def _validate_gdg_name(name): + """Validates the length of a GDG name.""" + if name and len(name) > 35: + raise GenerationDataGroupCreateError( + msg="GDG creation failed: dataset name exceeds 35 characters." + ) + def create(self): """Creates the GDG. @@ -2956,6 +2966,7 @@ def create(self): int Indicates if changes were made. """ + GenerationDataGroup._validate_gdg_name(self.name) gdg = gdgs.create( name=self.name, limit=self.limit, @@ -2984,16 +2995,38 @@ def ensure_present(self, replace): changed = False present = False gdg = None + name = arguments.get("name") + + # Add this line to validate the name length before any operation + GenerationDataGroup._validate_gdg_name(name) + + def _create_gdg(args): + try: + return gdgs.create(**args) + except exceptions._ZOAUExtendableException as e: + # Now, check if it's the specific exception we want to handle. + if isinstance(e, GenerationDataGroupCreateException): + stderr = getattr(e.response, 'stderr_response', '') + if "BGYSC5906E" in stderr : + raise GenerationDataGroupCreateError(msg="FIFO creation failed: the system may not support FIFO datasets or is not configured for it.") + elif "BGYSC6104E" in stderr : + raise GenerationDataGroupCreateError(msg="GDG creation failed: 'purge=true' requires 'scratch=true'.") + else: + raise GenerationDataGroupCreateError(msg=f"GDG creation failed. Raw error: {stderr}") + else: + # If it's a different ZOAU error, re-raise it. 
+ raise e if gdgs.exists(arguments.get("name")): present = True if not present: - gdg = gdgs.create(**arguments) + gdg = _create_gdg(arguments) + else: if not replace: return changed changed = self.ensure_absent(True) - gdg = gdgs.create(**arguments) + gdg = _create_gdg(arguments) if isinstance(gdg, gdgs.GenerationDataGroupView): changed = True return changed @@ -3465,3 +3498,10 @@ def __init__(self, data_set): "Make sure the generation exists and is active.".format(data_set) ) super().__init__(self.msg) + + +class GenerationDataGroupCreateError(Exception): + def __init__(self, msg): + """Error during creation of a Generation Data Group.""" + self.msg = msg + super().__init__(self.msg) diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 06b2f59542..8be69c2add 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -701,10 +701,8 @@ def main(): result['cmd'] = ret['data']['commands'] result['changed'] = ret['data']['changed'] result['found'] = ret['data']['found'] - # Only populate 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - if len(stderr): - result['stderr'] = str(stderr) - result['rc'] = rc + result['stderr'] = str(stderr) + result['rc'] = rc module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 83dff1026a..fa40df375c 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -536,7 +536,7 @@ def test_mvs_archive_single_dataset( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -635,7 +635,7 
@@ def test_mvs_archive_single_dataset_adrdssu( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -725,7 +725,7 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") # Assert src_data_set is removed - cmd_result = hosts.all.shell(cmd = f"dls {src_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{src_data_set}'") for c_result in cmd_result.contacted.values(): assert f"BGYSC1103E No datasets match pattern: {src_data_set}." in c_result.get("stderr") finally: @@ -797,7 +797,7 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set) assert result.get("dest") == archive_data_set for ds in target_ds_list: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -876,7 +876,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo else: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -947,7 +947,7 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, ds_format for result in archive_result.contacted.values(): assert result.get("changed") is True assert result.get("dest") == archive_data_set - cmd_result = 
hosts.all.shell(cmd = f"dls {hlq}.*") + cmd_result = hosts.all.shell(cmd = f"dls '{hlq}.*'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1034,7 +1034,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form else: assert ds.get("name") in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1129,7 +1129,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1312,7 +1312,7 @@ def test_mvs_archive_single_dataset_encoding( assert result.get("changed") is True assert result.get("dest") == archive_data_set assert src_data_set in result.get("archived") - cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -1386,7 +1386,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding(ansible_zos_module, ds_fo assert result.get("changed") is True assert result.get("dest") == archive_data_set assert ds_name in result.get("archived") - cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd=f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1465,7 +1465,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding(ansible_zos assert 
result.get("changed") is True assert result.get("dest") == archive_data_set assert ds_name in result.get("archived") - cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd=f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1585,7 +1585,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib assert result.get("changed") is True assert result.get("dest") == archive_data_set assert ds_name in result.get("archived") - cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}") + cmd_result = hosts.all.shell(cmd=f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index c1d5805997..0b2ad125f6 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ b/tests/functional/modules/test_zos_encode_func.py @@ -362,8 +362,8 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): hosts = ansible_zos_module mvs_ps = get_tmp_ds_name() hosts.all.zos_data_set(name=mvs_ps, state="present", type="seq") - hosts.all.copy(content=TEST_DATA, dest=mvs_ps) - hosts.all.copy(content="test", dest=uss_dest_file) + hosts.all.zos_copy(content=TEST_DATA, dest=mvs_ps) + hosts.all.zos_copy(content="test", dest=uss_dest_file) results = hosts.all.zos_encode( src=mvs_ps, dest=uss_dest_file, @@ -477,8 +477,8 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): hosts.all.zos_data_set( name=mvs_pds_member, type="member", state="present" ) - hosts.all.copy(content=TEST_DATA, dest=mvs_pds_member) - hosts.all.copy(content="test", dest=uss_dest_file) + hosts.all.zos_copy(content=TEST_DATA, dest=mvs_pds_member) + hosts.all.zos_copy(content="test", dest=uss_dest_file) results = hosts.all.zos_encode( src=mvs_pds_member, 
dest=uss_dest_file, From 25604f588e6224126bd17ac7de091c901137494e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 2 Sep 2025 12:29:59 -0500 Subject: [PATCH 29/73] Add_branch_protection_ruleset_and_detect_secrets_baseline (#2263) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add documentation * Delete branch_protection_rules.json --------- Co-authored-by: André Marcel Gutiérrez Benítez --- .secrets.baseline | 85 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 .secrets.baseline diff --git a/.secrets.baseline b/.secrets.baseline new file mode 100644 index 0000000000..3486173fbe --- /dev/null +++ b/.secrets.baseline @@ -0,0 +1,85 @@ +{ + "exclude": { + "files": "zos_mvs_raw.rst|^.secrets.baseline$", + "lines": null + }, + "generated_at": "2025-09-01T16:59:39Z", + "plugins_used": [ + { + "name": "AWSKeyDetector" + }, + { + "name": "ArtifactoryDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "base64_limit": 4.5, + "name": "Base64HighEntropyString" + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "BoxDetector" + }, + { + "name": "CloudantDetector" + }, + { + "ghe_instance": "github.ibm.com", + "name": "GheDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "hex_limit": 3, + "name": "HexHighEntropyString" + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "keyword_exclude": null, + "name": "KeywordDetector" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "results": {}, + "version": 
"0.13.1+ibm.62.dss", + "word_list": { + "file": null, + "hash": null + } +} From f27c81c5e39e492d6c2a819a4dcbc46759f9f35f Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Thu, 4 Sep 2025 13:18:18 -0600 Subject: [PATCH 30/73] Port PRs 2248 and 1985 into dev (#2266) * cherry picked fix from 1.16. * Updated changelog * Fixed test zos_find * Merged 2251 * Added a proper quotes in dls to avoid shell resolving when using hlq.* * Avoid leaving temporary uss files behind * Cherry picked 2255 * Updated changelogs * Fixed zos_find tests * Updated zos_zfs_resize tests to add a retry to an intermittent issue * Added assertion catch * Updated changelog --------- Co-authored-by: Mayank Mani Co-authored-by: Rohitash Goyal --- .../2266-port-2255-2251-into-dev.yml | 17 ++++++ plugins/module_utils/data_set.py | 37 ++++++++++--- plugins/modules/zos_job_output.py | 12 ++--- .../functional/modules/test_zos_find_func.py | 4 +- .../modules/test_zos_job_output_func.py | 2 +- .../modules/test_zos_zfs_resize_func.py | 53 +++++++++++++++++-- 6 files changed, 105 insertions(+), 20 deletions(-) create mode 100644 changelogs/fragments/2266-port-2255-2251-into-dev.yml diff --git a/changelogs/fragments/2266-port-2255-2251-into-dev.yml b/changelogs/fragments/2266-port-2255-2251-into-dev.yml new file mode 100644 index 0000000000..e8b0648f8b --- /dev/null +++ b/changelogs/fragments/2266-port-2255-2251-into-dev.yml @@ -0,0 +1,17 @@ +minor_changes: + - zos_data_set - Enhances error messages when deleting a Generation Data Group fails. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266) + +trivial: + - zos_job_output - renamed the input parameter to be called as sysin_dd. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). + + - test_zos_job_output_func - modified the test case 'test_zos_job_output_job_exists_with_sysin' + to use sysin_dd instead of input. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). 
+ + - test_zos_zfs_resize_func - Added a retry for autoincrease test in case that would fail. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). + + - test_zos_find_func - Updated an assertion condition that allows for more data sets to reside in the testing machine. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). \ No newline at end of file diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 2db045ee62..a02f6f6dee 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -3051,13 +3051,29 @@ def ensure_absent(self, force): rc = datasets.delete(self.name) if rc > 0: if force: - if isinstance(self.gdg, gdgs.GenerationDataGroupView): - self.gdg.delete() - else: - gdg_view = gdgs.GenerationDataGroupView(name=self.name) - gdg_view.delete() - else: - raise DatasetDeleteError(self.raw_name, rc) + try: + if isinstance(self.gdg, gdgs.GenerationDataGroupView): + self.gdg.delete() + else: + gdg_view = gdgs.GenerationDataGroupView(name=self.name) + gdg_view.delete() + except exceptions._ZOAUExtendableException as e: + stderr = getattr(e.response, 'stderr_response', '') + if "BGYSC1603I" in stderr: + raise GenerationDataGroupDeleteError( + msg=( + "Data set deletion failed: the generation data set is currently in use " + " by another job or session. Try deleting after ensuring no active usage." + ) + ) + elif "BGYSC5906E" in stderr: + raise GenerationDataGroupDeleteError( + msg="GDG deletion failed due to an IDCAMS failure. A GDS might be in use or locked." + ) + else: + raise GenerationDataGroupDeleteError( + msg=f"GDG deletion failed. 
Raw error: {stderr}" + ) else: return False return True @@ -3505,3 +3521,10 @@ def __init__(self, msg): """Error during creation of a Generation Data Group.""" self.msg = msg super().__init__(self.msg) + + +class GenerationDataGroupDeleteError(Exception): + def __init__(self, msg): + """Error during deletion of a Generation Data Group.""" + self.msg = msg + super().__init__(self.msg) diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 13edfbb310..dacd92aa27 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -33,7 +33,7 @@ like "*". - If there is no dd_name, or if dd_name="?", output of all the dds under the given job will be displayed. - - If SYSIN DDs are needed, C(input) should be set to C(true). + - If SYSIN DDs are needed, I(sysin_dd) should be set to C(true). version_added: "1.0.0" author: - "Jack Ho (@jacklotusho)" @@ -63,7 +63,7 @@ type: str required: false aliases: [ ddname ] - input: + sysin_dd: description: - Whether to include SYSIN DDs as part of the output. 
type: bool @@ -102,7 +102,7 @@ - name: Query a job's output including SYSIN DDs zos_job_output: job_id: "JOB00548" - input: true + sysin_dd: true """ RETURN = r""" @@ -508,8 +508,8 @@ def run_module(): job_id=dict(type="str", required=False), job_name=dict(type="str", required=False), owner=dict(type="str", required=False), - input=dict(type="bool", required=False, default=False), dd_name=dict(type="str", required=False, aliases=['ddname']), + sysin_dd=dict(type="bool", required=False, default=False), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) @@ -518,8 +518,8 @@ def run_module(): job_id=dict(type="job_identifier", required=False), job_name=dict(type="job_identifier", required=False), owner=dict(type="str", required=False), - input=dict(type="bool", required=False, default=False), dd_name=dict(type="str", required=False, aliases=['ddname']), + sysin_dd=dict(type="bool", required=False, default=False), ) try: @@ -538,8 +538,8 @@ def run_module(): job_id = module.params.get("job_id") job_name = module.params.get("job_name") owner = module.params.get("owner") - sysin = module.params.get("input") dd_name = module.params.get("dd_name") + sysin = module.params.get("sysin_dd") if not job_id and not job_name and not owner: module.fail_json(msg="Please provide a job_id or job_name or owner", stderr="", **results) diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 1047c07e7d..cc9fa2ecaa 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -696,7 +696,7 @@ def test_find_vsam_and_gdg_data_sets(ansible_zos_module, volumes_on_systems): resource_type=['cluster', 'gdg'] ) for val in find_res.contacted.values(): - assert len(val.get('data_sets')) == 2 + assert len(val.get('data_sets')) >= 2 assert val.get('matched') == len(val.get('data_sets')) assert {"name":VSAM_NAMES[0], "type": "CLUSTER"} in val.get('data_sets') assert 
{"name":gdg_a, "type": "GDG"} in val.get('data_sets') @@ -781,7 +781,7 @@ def test_find_vsam_and_nonvsam_data_sets(ansible_zos_module, volumes_on_systems) ) for val in find_res.contacted.values(): assert val.get('msg') is None - assert len(val.get('data_sets')) == 4 + assert len(val.get('data_sets')) >= 4 assert {"name":f'{VSAM_NAMES[0]}.DATA', "type": "DATA"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) assert val.get('examined') is not None diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index 4d4c82e227..12949cac7b 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -338,7 +338,7 @@ def test_zos_job_output_job_exists_with_sysin(ansible_zos_module): ) hosts.all.file(path=TEMP_PATH, state="absent") sysin = True - results = hosts.all.zos_job_output(job_name="SYSINS", input=sysin) + results = hosts.all.zos_job_output(job_name="SYSINS", sysin_dd=sysin) for result in results.contacted.values(): assert result.get("changed") is True for job in result.get("jobs"): diff --git a/tests/functional/modules/test_zos_zfs_resize_func.py b/tests/functional/modules/test_zos_zfs_resize_func.py index 01a104b332..482966d3ca 100644 --- a/tests/functional/modules/test_zos_zfs_resize_func.py +++ b/tests/functional/modules/test_zos_zfs_resize_func.py @@ -1003,10 +1003,30 @@ def test_fail_operation(ansible_zos_module): # No auto increment playbook ############################# -def test_no_auto_increase(get_config): + +def test_no_auto_increase_wrapper(get_config): + path = get_config + retries = 0 + max_retries = 5 + success = False + + # Not adding a try/except block here so a real exception can bubble up + # and stop pytest immediately (if using -x or --stop). + while retries < max_retries: + print(f'Trying no_auto_increase. 
Retry: {retries}.') + result = no_auto_increase(path) + + if result: + success = True + break + + retries += 1 + + assert success is True + +def no_auto_increase(path): ds_name = get_tmp_ds_name() mount_point = "/" + get_random_file_name(dir="tmp") - path = get_config with open(path, 'r') as file: enviroment = yaml.safe_load(file) ssh_key = enviroment["ssh_key"] @@ -1040,14 +1060,36 @@ def test_no_auto_increase(get_config): ) stdout = os.system(command) assert stdout != 0 + return True + except AssertionError: + return False finally: os.remove("inventory.yml") os.remove("playbook.yml") -def test_no_auto_increase_accept(get_config): +def test_no_auto_increase_accept_wrapper(get_config): + path = get_config + retries = 0 + max_retries = 5 + success = False + + # Not adding a try/except block here so a real exception can bubble up + # and stop pytest immediately (if using -x or --stop). + while retries < max_retries: + print(f'Trying no_auto_increase_accept. Retry: {retries}.') + result = no_auto_increase_accept(path) + + if result: + success = True + break + + retries += 1 + + assert success is True + +def no_auto_increase_accept(path): ds_name = get_tmp_ds_name() mount_point = "/" + get_random_file_name(dir="tmp") - path = get_config with open(path, 'r') as file: enviroment = yaml.safe_load(file) ssh_key = enviroment["ssh_key"] @@ -1081,6 +1123,9 @@ def test_no_auto_increase_accept(get_config): ) stdout = os.system(command) assert stdout == 0 + return True + except AssertionError: + return False finally: os.remove("inventory.yml") os.remove("playbook.yml") \ No newline at end of file From 2d2877b61da716a8bd05d3956aa6109525cb7a86 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Sun, 7 Sep 2025 08:04:48 -0600 Subject: [PATCH 31/73] Added RST for started task --- docs/source/modules/zos_started_task.rst | 164 +++++++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 docs/source/modules/zos_started_task.rst diff --git 
a/docs/source/modules/zos_started_task.rst b/docs/source/modules/zos_started_task.rst new file mode 100644 index 0000000000..e7d97f8155 --- /dev/null +++ b/docs/source/modules/zos_started_task.rst @@ -0,0 +1,164 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_started_task.py + +.. _zos_started_task_module: + + +zos_started_task -- Perform operations on started tasks. +======================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- start, display, modify, cancel, force and stop a started task + + + + + +Parameters +---------- + + +asid + *asid* is a unique address space identifier which gets assigned to each running started task. + + | **required**: False + | **type**: str + + +device_type + *device_type* is the type of the output device (if any) associated with the task. + + | **required**: False + | **type**: str + + +device_number + *device_number* is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. + + | **required**: False + | **type**: str + + +identifier_name + *identifier_name* is the name that identifies the task to be started. This name can be up to 8 characters long. The first character must be alphabetical. + + | **required**: False + | **type**: str + + +job_account + *job_account* specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. + + | **required**: False + | **type**: str + + +job_name + *job_name* is a name which should be assigned to a started task while starting it. If job_name is not specified, then member_name is used as job_name. 
+ + | **required**: False + | **type**: str + + +keyword_parameters + Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. + + | **required**: False + | **type**: str + + +member_name + *member_name* is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. + + | **required**: False + | **type**: str + + +operation + The started task operation which needs to be performed. + + If *operation=start* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. + + + | **required**: True + | **type**: str + | **choices**: start, stop, modify, display, force, cancel + + +parameters + Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + + | **required**: False + | **type**: str + + +reus_asid + When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + + | **required**: False + | **type**: str + | **choices**: YES, NO + + +subsystem_name + The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + + | **required**: False + | **type**: str + + +volume_serial + If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. 
+ + | **required**: False + | **type**: str + + +verbose + Return System logs that describe the task's execution. + + | **required**: False + | **type**: bool + | **default**: False + + +wait_time_s + Option *wait_time_s* is the total time that module `zos_started_tak <./zos_started_task.html>`_ will wait for a submitted task. The time begins when the module is executed on the managed node. + + | **required**: False + | **type**: int + | **default**: 5 + + + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Start a started task using member name. + zos_started_task: + member: "PROCAPP" + operation: "start" + + + + + + + + + + From b864dfddab297a521d3b4d351eef5c19963ce3eb Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Mon, 8 Sep 2025 11:25:32 -0600 Subject: [PATCH 32/73] [Enhancement][zos_backup_restore][2.0] Add sphere keyword to the module (#2257) * Added index option * Updated tests to remove zos_data_set * Fixed test case * Added debug messages * Added index option and test * Cherry picked PR 2260 * Updated changelogs * Fixed sanity issues * Fixed sanity issues * Fixed sanity issues * Added empty keywords dict * Fixed pep8 issue * removed old changelog and added sanityignores for 2.19 --------- Co-authored-by: Rohitash Goyal --- .../2259-add-sphere-zos_backup_restore.yml | 8 + plugins/modules/zos_backup_restore.py | 94 +++++++++ .../modules/test_zos_backup_restore.py | 193 +++++++++++++++++- tests/sanity/ignore-2.19.txt | 28 +++ 4 files changed, 319 insertions(+), 4 deletions(-) create mode 100644 changelogs/fragments/2259-add-sphere-zos_backup_restore.yml create mode 100644 tests/sanity/ignore-2.19.txt diff --git a/changelogs/fragments/2259-add-sphere-zos_backup_restore.yml b/changelogs/fragments/2259-add-sphere-zos_backup_restore.yml new file mode 100644 index 0000000000..f049134247 --- /dev/null +++ b/changelogs/fragments/2259-add-sphere-zos_backup_restore.yml @@ -0,0 +1,8 @@ +minor_changes: + - zos_backup_restore - Adds ``compress`` 
option to enable compression of partitioned data sets using hardware compression if available. + Adds ``terse`` option to modify the behavior of executing an AMATERSE step to compress the temporary data set for the backup. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259) + - zos_backup_restore - Adds ``index`` that allows for the backup and restore of all the associated + alternate index (AIX®) clusters and paths of a VSAM. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259) + diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 774543ee4d..210278e7f8 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -135,6 +135,22 @@ with matching name on the target device. type: bool default: False + compress: + description: + - When I(operation=backup), enables compression of partitioned data sets using system-level + compression features. If supported, this may utilize zEDC hardware compression. + - This option can reduce the size of the temporary dataset generated during backup operations + either before the AMATERSE step when I(terse) is True or the resulting backup + when I(terse) is False. + type: bool + default: False + terse: + description: + - When I(operation=backup), executes an AMATERSE step to compress and pack the temporary data set + for the backup. This creates a backup with a format suitable for transferring off-platform. + - If I(operation=backup) and if I(dataset=False) then option I(terse) must be True. + type: bool + default: True sms_storage_class: description: - When I(operation=restore), specifies the storage class to use. The storage class will @@ -201,6 +217,19 @@ not be identified, the value C(TMPHLQ) is used. required: false type: str + index: + description: + - When C(operation=backup) specifies that for any VSAM cluster backup, the backup must also contain + all the associated alternate index (AIX®) clusters and paths. 
+ - When C(operation=restore) specifies that for any VSAM cluster dumped with the SPHERE keyword, + the module must also restore all associated AIX® clusters and paths. + - The alternate index is a VSAM function that allows logical records of a + KSDS or ESDS to be accessed sequentially and directly by more than one key + field. The cluster that has the data is called the base cluster. An + alternate index cluster is then built from the base cluster. + type: bool + required: false + default: false attributes: action: @@ -256,6 +285,15 @@ - user.gdg(0) backup_name: my.backup.dzp +- name: Backup datasets using compress + zos_backup_restore: + operation: backup + compress: true + terse: true + data_sets: + include: someds.name.here + backup_name: my.backup.dzp + - name: Backup all datasets matching the pattern USER.** to UNIX file /tmp/temp_backup.dzp, ignore recoverable errors. zos_backup_restore: operation: backup @@ -349,6 +387,16 @@ backup_name: /tmp/temp_backup.dzp sms_storage_class: DB2SMS10 sms_management_class: DB2SMS10 + +- name: Backup all data sets matching the pattern USER.VSAM.** to z/OS UNIX + file /tmp/temp_backup.dzp and ensure the VSAM alternate index are preserved. 
+ zos_backup_restore: + operation: backup + data_sets: + include: user.vsam.** + backup_name: /tmp/temp_backup.dzp + index: true + """ RETURN = r""" changed: @@ -420,10 +468,14 @@ def main(): backup_name=dict(type="str", required=True), recover=dict(type="bool", default=False), overwrite=dict(type="bool", default=False), + compress=dict(type="bool", default=False), + terse=dict(type="bool", default=True), sms_storage_class=dict(type="str", required=False), sms_management_class=dict(type="str", required=False), hlq=dict(type="str", required=False), tmp_hlq=dict(type="str", required=False), + # 2.0 redesign extra values for ADRDSSU keywords + index=dict(type="bool", required=False, default=False), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=False) @@ -439,11 +491,21 @@ def main(): backup_name = params.get("backup_name") recover = params.get("recover") overwrite = params.get("overwrite") + compress = params.get("compress") + terse = params.get("terse") sms_storage_class = params.get("sms_storage_class") sms_management_class = params.get("sms_management_class") hlq = params.get("hlq") tmp_hlq = params.get("tmp_hlq") + # 2.0 redesign extra ADRDSSU keywords + sphere = params.get("index") + + # extra keyword supported by ZOAU but not part of their signature. 
+ keywords = {} + if sphere: + keywords.update(sphere=None) + if operation == "backup": backup( backup_name=backup_name, @@ -453,12 +515,15 @@ def main(): full_volume=full_volume, temp_volume=temp_volume, overwrite=overwrite, + compress=compress, + terse=terse, recover=recover, space=space, space_type=space_type, sms_storage_class=sms_storage_class, sms_management_class=sms_management_class, tmp_hlq=tmp_hlq, + keywords=keywords, ) else: restore( @@ -476,6 +541,7 @@ def main(): sms_storage_class=sms_storage_class, sms_management_class=sms_management_class, tmp_hlq=tmp_hlq, + keywords=keywords, ) result["backup_name"] = backup_name result["changed"] = True @@ -546,10 +612,14 @@ def parse_and_validate_args(params): backup_name=dict(type=backup_name_type, required=False), recover=dict(type="bool", default=False), overwrite=dict(type="bool", default=False), + compress=dict(type="bool", default=False), + terse=dict(type="bool", default=True), sms_storage_class=dict(type=sms_type, required=False), sms_management_class=dict(type=sms_type, required=False), hlq=dict(type=hlq_type, default=None, dependencies=["operation"]), tmp_hlq=dict(type=hlq_type, required=False), + # 2.0 redesign extra values for ADRDSSU keywords + index=dict(type="bool", required=False, default=False), ) parsed_args = BetterArgParser(arg_defs).parse_args(params) @@ -567,12 +637,15 @@ def backup( full_volume, temp_volume, overwrite, + compress, + terse, recover, space, space_type, sms_storage_class, sms_management_class, tmp_hlq, + keywords, ): """Backup data sets or a volume to a new data set or unix file. @@ -580,6 +653,8 @@ def backup( ---------- backup_name : str The data set or UNIX path to place the backup. + compress : bool + Compress the dataset or file produced by ADRDSSU before taking backup. include_data_sets : list A list of data set patterns to include in the backup. exclude_data_sets : list @@ -590,6 +665,8 @@ def backup( Specifies if a backup will be made of the entire volume. 
temp_volume : bool Specifies the volume that should be used to store temporary files. + terse : bool + Uses AMATERSE to compress and pack the dump generated by ADRDSSU. overwrite : bool Specifies if existing data set or UNIX file matching I(backup_name) should be deleted. recover : bool @@ -604,6 +681,8 @@ def backup( Specifies the management class to use. tmp_hlq : str Specifies the tmp hlq to temporary datasets. + keywords : dict + Specifies ADRDSSU keywords that is passed directly to the dunzip utility. """ args = locals() zoau_args = to_dzip_args(**args) @@ -625,6 +704,7 @@ def restore( sms_storage_class, sms_management_class, tmp_hlq, + keywords, ): """Restore data sets or a volume from the backup. @@ -662,6 +742,8 @@ def restore( Specifies the management class to use. tmp_hlq : str Specifies the tmp hlq to temporary datasets. + keywords : dict + Specifies ADRDSSU keywords that is passed directly to the dunzip utility. Raises ------ @@ -965,6 +1047,12 @@ def to_dzip_args(**kwargs): if kwargs.get("overwrite"): zoau_args["overwrite"] = kwargs.get("overwrite") + if kwargs.get("compress"): + zoau_args["compress"] = kwargs.get("compress") + + if kwargs.get("terse"): + zoau_args["terse"] = kwargs.get("terse") + if kwargs.get("sms_storage_class"): zoau_args["storage_class_name"] = kwargs.get("sms_storage_class") @@ -980,6 +1068,9 @@ def to_dzip_args(**kwargs): if kwargs.get("tmp_hlq"): zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) + if kwargs.get("keywords"): + zoau_args["keywords"] = kwargs.get("keywords") + return zoau_args @@ -1043,6 +1134,9 @@ def to_dunzip_args(**kwargs): zoau_args["high_level_qualifier"] = str(kwargs.get("tmp_hlq")) zoau_args["keep_original_hlq"] = False + if kwargs.get("keywords"): + zoau_args["keywords"] = kwargs.get("keywords") + return zoau_args diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index ecaf87c871..b5d8070fdb 100644 --- 
a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -25,12 +25,13 @@ import string import random import time +import json from ibm_zos_core.tests.helpers.utils import get_random_file_name +from ibm_zos_core.tests.helpers.volumes import Volume_Handler DATA_SET_CONTENTS = "HELLO WORLD" TMP_DIRECTORY = "/tmp/" - c_pgm="""#include #include #include @@ -85,6 +86,11 @@ def create_file_with_contents(hosts, path, contents): assert_module_did_not_fail(results) +def create_vsam(hosts, data_set_name): + results = hosts.all.shell(cmd=f"dtouch -tksds -k4:0 {data_set_name}") + assert_module_did_not_fail(results) + + def delete_data_set_or_file(hosts, name): if name.startswith("/"): delete_file(hosts, name) @@ -93,7 +99,7 @@ def delete_data_set_or_file(hosts, name): def delete_data_set(hosts, data_set_name): - hosts.all.shell(cmd=f"drm '{data_set_name}'") + hosts.all.shell(cmd=f"drm -F '{data_set_name}'") def delete_file(hosts, path): @@ -254,6 +260,117 @@ def test_backup_of_data_set(ansible_zos_module, backup_name, overwrite, recover) delete_data_set_or_file(hosts, backup_name) delete_remnants(hosts) +@pytest.mark.parametrize( + "backup_name, terse", + [ + ("DATA_SET", False), + ("DATA_SET", True), + ], +) +def test_backup_and_restore_of_data_set_with_compression_and_terse(ansible_zos_module, backup_name, terse): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + backup_name_uncompressed = get_tmp_ds_name(1, 1) + backup_name_compressed = get_tmp_ds_name(1, 1) + size_uncompressed = 0 + size_compressed = 0 + + try: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, backup_name_uncompressed) + delete_data_set_or_file(hosts, backup_name_compressed) + + # Create large data set using decho + shell_script_content = f"""#!/bin/bash +for i in {{1..100}} +do + decho -a "this is a test line to make it big" "{data_set_name}" +done +""" + hosts.all.shell(f"echo 
'{shell_script_content}' > shell_script.sh") + hosts.all.shell("chmod +x shell_script.sh") + hosts.all.shell("./shell_script.sh") + + cmd_result_dataset = hosts.all.shell(f"dls -j -s {data_set_name}") + for result in cmd_result_dataset.contacted.values(): + output_dataset = json.loads(result.get("stdout")) + size_dataset = int(output_dataset["data"]["datasets"][0]["used"]) + + results_uncompressed = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + backup_name=backup_name_uncompressed, + compress=False, + terse=True, + ) + assert_module_did_not_fail(results_uncompressed) + assert_data_set_or_file_exists(hosts, backup_name_uncompressed) + + cmd_result_uncompressed = hosts.all.shell(f"dls -j -s {backup_name_uncompressed}") + for result in cmd_result_uncompressed.contacted.values(): + output = json.loads(result.get("stdout")) + size_uncompressed = int(output["data"]["datasets"][0]["used"]) + + results_compressed = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + backup_name=backup_name_compressed, + compress=True, + terse=terse, + ) + assert_module_did_not_fail(results_compressed) + assert_data_set_or_file_exists(hosts, backup_name_compressed) + + cmd_result_compressed = hosts.all.shell(f"dls -j -s {backup_name_compressed}") + for result in cmd_result_compressed.contacted.values(): + output_compressed = json.loads(result.get("stdout")) + size_compressed = int(output_compressed["data"]["datasets"][0]["used"]) + + #When using compress=True with terse=True (default), two different algorithms are used. + #The zEDC hardware compresses the data, and then AMATERSE reprocesses that compressed data. + #It's not designed to compress already highly compressed data. The overhead of AMATERSE, + #combined with zEDC hardware compression, can outweigh the benefits. + #This leads to a final file size larger than if you had only used Terse.
+ + if size_uncompressed > 0: + assert size_compressed > size_uncompressed, \ + f"Compressed size ({size_compressed}) is not smaller ({size_uncompressed})" + + #deleting dataset to test the restore. + delete_data_set_or_file(hosts, data_set_name) + + #testing restoration of files + hosts.all.zos_backup_restore( + operation="restore", + backup_name=backup_name_compressed + ) + cmd_result_restored = hosts.all.shell(f"dls -j -s {data_set_name}") + for result in cmd_result_restored.contacted.values(): + output_restored = json.loads(result.get("stdout")) + size_restored_compressed = int(output_restored["data"]["datasets"][0]["used"]) + + #deleting dataset to test the restore + delete_data_set_or_file(hosts, data_set_name) + + hosts.all.zos_backup_restore( + operation="restore", + backup_name=backup_name_uncompressed, + overwrite=True, + ) + cmd_result_restored = hosts.all.shell(f"dls -j -s {data_set_name}") + for result in cmd_result_restored.contacted.values(): + output_restored = json.loads(result.get("stdout")) + size_restored_uncompressed = int(output_restored["data"]["datasets"][0]["used"]) + if size_dataset > 0: + assert (size_dataset == size_restored_compressed == size_restored_uncompressed), \ + f"Restoration of {data_set_name} was not done properly. Unable to restore datasets." + + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, backup_name_uncompressed) + delete_data_set_or_file(hosts, backup_name_compressed) + delete_remnants(hosts) + # Commenting these tests because of issue https://github.com/ansible-collections/ibm_zos_core/issues/2235 # which likely is a zoau bug that needs to be fixed. # @pytest.mark.parametrize( @@ -1134,8 +1251,76 @@ def managed_user_backup_of_data_set_tmphlq_restricted_user(ansible_zos_module): f"Backup name '{backup_name}' is there in output so tmphlq failed." 
print(result) assert result.get("changed", False) is False - + + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, backup_name) + delete_remnants(hosts, hlqs) + + +def test_backup_of_vsam_index(ansible_zos_module, volumes_with_vvds): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + alternate_index = get_tmp_ds_name() + backup_name = get_tmp_ds_name() + + try: + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + # Create VSAM KSDS + create_vsam( + hosts, data_set_name + ) + # Create alternate indexes + aix_cmd = f""" +echo ' DEFINE ALTERNATEINDEX (NAME({alternate_index}) - + RELATE({data_set_name}) - + KEYS(4 0) - + VOLUMES({volume}) - + CYLINDERS(10 1) - + FREESPACE(10 10) - + NONUNIQUEKEY) - + DATA (NAME({alternate_index}.DATA)) - + INDEX (NAME({alternate_index}.INDEX)) ' | mvscmdauth --pgm=IDCAMS --sysprint=* --sysin=stdin + + """ + results = hosts.all.shell(cmd=f"{aix_cmd}") + assert_module_did_not_fail(results) + + + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + backup_name=backup_name, + index=True, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, backup_name) + + # Delete the vsam data set and alternate index + delete_data_set(hosts, data_set_name) + delete_data_set(hosts, alternate_index) + + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=backup_name, + index=True, + ) + + # Validate that both original vsam and alternate index exist + vls_result = hosts.all.shell(f"vls {alternate_index}") + assert_module_did_not_fail(vls_result) + for result in vls_result.contacted.values(): + assert alternate_index in result.get("stdout") + assert f"{alternate_index}.DATA" in result.get("stdout") + assert f"{alternate_index}.INDEX" in result.get("stdout") + vls_result = hosts.all.shell(f"vls {data_set_name}") + assert_module_did_not_fail(vls_result) 
+ for result in vls_result.contacted.values(): + assert data_set_name in result.get("stdout") + assert f"{data_set_name}.DATA" in result.get("stdout") + assert f"{data_set_name}.INDEX" in result.get("stdout") finally: delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, alternate_index) delete_data_set_or_file(hosts, backup_name) - delete_remnants(hosts, hlqs) \ No newline at end of file diff --git a/tests/sanity/ignore-2.19.txt b/tests/sanity/ignore-2.19.txt new file mode 100644 index 0000000000..52cbed2aa3 --- /dev/null +++ b/tests/sanity/ignore-2.19.txt @@ -0,0 +1,28 @@ +plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 
+plugins/modules/zos_operator.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_replace.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_stat.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file From 08cdd4cc9b7d57a186fe43c9cfb4a0fc8fdcf0a0 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Tue, 9 Sep 2025 00:36:34 +0530 Subject: [PATCH 33/73] Features update --- plugins/modules/zos_started_task.py | 1220 ++++++++++++----- .../modules/test_zos_started_task_func.py | 627 ++++++++- 2 files changed, 1421 insertions(+), 426 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 21bafd7470..9b8ebd9d2e 100644 
--- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -27,103 +27,274 @@ - start, display, modify, cancel, force and stop a started task options: - asid: + start_task: description: - - I(asid) is a unique address space identifier which gets assigned to each running started task. + - The start operation of the started task. required: false - type: str - device_type: + type: dict + suboptions: + device_type: + description: + - I(device_type) is the type of the output device (if any) associated with the task. + required: false + type: str + device_number: + description: + - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. + A slash (/) must precede a 4-digit number but is not before a 3-digit number. + required: false + type: str + identifier_name: + description: + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_account: + description: + - I(job_account) specifies accounting data in the JCL JOB statement for the started task. + If the source JCL was a job and has already accounting data, the value that is specified on this parameter + overrides the accounting data in the source JCL. + required: false + type: str + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + aliases: + - job + - task + - task_name + keyword_parameters: + description: + - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. + The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than + 44 characters in length. 
+ required: false + type: dict + member_name: + description: + - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL + for the task to be started. The member can be either a job or a cataloged procedure. + required: false + type: str + aliases: + - member + parameters: + description: + - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + required: false + type: list + elements: str + reus_asid: + description: + - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, + a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified + on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + required: false + type: str + choices: + - 'YES' + - 'NO' + subsystem: + description: + - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, + which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + required: false + type: str + volume_serial: + description: + - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. + required: false + type: str + display_task: description: - - I(device_type) is the type of the output device (if any) associated with the task. + - The display operation of the started task. required: false - type: str - device_number: + type: dict + suboptions: + identifier_name: + description: + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. 
If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + aliases: + - job + - task + - task_name + modify_task: description: - - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. - A slash (/) must precede a 4-digit number but is not before a 3-digit number. + - The modify operation of the started task. required: false - type: str - identifier_name: + type: dict + suboptions: + identifier_name: + description: + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + aliases: + - job + - task + - task_name + parameters: + description: + - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + required: false + type: list + elements: str + cancel_task: description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. + - The cancel operation of the started task. required: false - type: str - aliases: - - identifier - job_account: + type: dict + suboptions: + armrestart: + description: + - I(armrestart) indicates to restart a started task automatically after the cancel completes. + required: false + type: bool + asid: + description: + - I(asid) is a unique address space identifier which gets assigned to each running started task. + required: false + type: str + dump: + description: + - I(dump) indicates to take dump before ending a started task. 
+ required: false + type: bool + identifier_name: + description: + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + aliases: + - job + - task + - task_name + userid: + description: + - I(userid) is the user ID of the time-sharing user you want to cancel. + required: false + type: str + force_task: description: - - I(job_account) specifies accounting data in the JCL JOB statement for the started task. - If the source JCL was a job and has already accounting data, the value that is specified on this parameter - overrides the accounting data in the source JCL. + - The force operation of the started task. required: false - type: str - job_name: + type: dict + suboptions: + arm: + description: + - I(arm) indicates to execute normal task termination routines without causing address space destruction. + required: false + type: bool + armrestart: + description: + - I(armrestart) indicates to restart a started task automatically after the cancel completes. + required: false + type: bool + asid: + description: + - I(asid) is a unique address space identifier which gets assigned to each running started task. + required: false + type: str + identifier_name: + description: + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. 
+ required: false + type: str + aliases: + - job + - task + - task_name + retry: + description: + - I(retry) is applicable for only FORCE TCB. + required: false + type: str + choices: + - 'YES' + - 'NO' + tcb_address: + description: + - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. + required: false + type: str + userid: + description: + - I(userid) is the user ID of the time-sharing user you want to cancel. + required: false + type: str + stop_task: description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. + - The stop operation of the started task. required: false - type: str - aliases: - - job - - task - - task_name - keyword_parameters: - description: - - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. - The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than - 44 characters in length. - required: false - type: str - member_name: - description: - - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL - for the task to be started. The member can be either a job or a cataloged procedure. - required: false - type: str - aliases: - - member - operation: - description: - - The started task operation which needs to be performed. - - > - If I(operation=start) and the data set does not exist on the managed node, - no action taken, module completes successfully with I(changed=False). 
- required: true - type: str - choices: - - start - - stop - - modify - - display - - force - - cancel - parameters: - description: - - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks - required: false - type: str - reus_asid: - description: - - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, - a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified - on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. - required: false - type: str - choices: - - 'YES' - - 'NO' - subsystem_name: - description: - - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, - which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - required: false - type: str - volume_serial: - description: - - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. - required: false - type: str + type: dict + suboptions: + asid: + description: + - I(asid) is a unique address space identifier which gets assigned to each running started task. + required: false + type: str + identifier_name: + description: + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + aliases: + - job + - task + - task_name verbose: description: - Return System logs that describe the task's execution. 
@@ -132,12 +303,11 @@ default: false wait_time_s: required: false - default: 5 + default: 0 type: int description: - - Option I(wait_time_s) is the total time that module - L(zos_started_tak,./zos_started_task.html) will wait for a submitted task. The time begins when the module is executed - on the managed node. + - Option I(wait_time_s) is the maximum amount of time, in centiseconds (0.01s), to wait for a response after submitting + the console command. Default value of 0 means to wait the default amount of time supported by the opercmd utility. """ EXAMPLES = r""" - name: Start a started task using member name. @@ -164,7 +334,7 @@ ) try: - from zoautil_py import opercmd,zsystem + from zoautil_py import opercmd, zsystem except ImportError: zoau_exceptions = ZOAUImportError(traceback.format_exc()) @@ -190,15 +360,15 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals Returns ------- - OperatorQueryResult - The result of the command. + tuple + Tuple containing the RC, standard out, standard err of the + query script and started task parameters. """ task_params = {} # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s if execute_display_before: task_params = execute_display_command(started_task_name, timeout_c) - response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) if execute_display_after: @@ -209,40 +379,392 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals stderr = response.stderr_response return rc, stdout, stderr, task_params -def execute_display_command(started_task_name, timeout_c): - cmd = "d a,"+started_task_name - display_response = opercmd.execute(cmd, timeout_c) + +def execute_display_command(started_task_name, timeout_s): + """Execute operator display command. + + Parameters + ---------- + started_task_name : str + The name of started task.
+ timeout_s : int + Timeout to wait for the command execution, measured in centiseconds. + + Returns + ------- + list + List contains extracted parameters from display command output of started task + """ + cmd = "d a," + started_task_name + display_response = opercmd.execute(cmd, timeout_s) task_params = [] if display_response.rc == 0 and display_response.stderr_response == "": task_params = extract_keys(display_response.stdout_response) return task_params -def prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters): +def validate_and_prepare_start_command(module, start_parms): + """Validates parameters and creates start command + + Parameters + ---------- + start_parms : dict + The started task start command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The start command in string format. + """ + member = start_parms.get('member_name') + identifier = start_parms.get('identifier_name') + job_name = start_parms.get('job_name') + job_account = start_parms.get('job_account') + parameters = start_parms.get('parameters') or [] + device_type = start_parms.get('device_type') or "" + device_number = start_parms.get('device_number') or "" + volume_serial = start_parms.get('volume_serial') or "" + subsystem_name = start_parms.get('subsystem') + reus_asid = start_parms.get('reus_asid') + keyword_parameters = start_parms.get('keyword_parameters') + keyword_parameters_string = "" + device = device_type if device_type else device_number + # Validations + if device_number and device_type: + module.fail_json( + rc=5, + msg="device_number and device_type are mutually exclusive.", + changed=False + ) + if job_account and len(job_account) > 55: + module.fail_json( + rc=5, + msg="job_account value should not exceed 55 characters.", + changed=False + ) + if device_number: + devnum_len = len(device_number) + if devnum_len not in (3, 5) or (devnum_len == 
5 and not device_number.startswith("/")): + module.fail_json( + rc=5, + msg="Invalid device_number.", + changed=False + ) + if subsystem_name and len(subsystem_name) > 4: + module.fail_json( + rc=5, + msg="The subsystem_name must be 1 - 4 characters.", + changed=False + ) + if keyword_parameters: + for key, value in keyword_parameters.items(): + key_len = len(key) + value_len = len(value) + if key_len > 44 or value_len > 44 or key_len + value_len > 65: + module.fail_json( + rc=5, + msg="The length of a keyword=option is exceeding 66 characters or length of an individual value is exceeding 44 characters." + + "key:{0}, value:{1}".format(key, value), + changed=False + ) + else: + if keyword_parameters_string: + keyword_parameters_string = keyword_parameters_string + "," + f"{key}={value}" + else: + keyword_parameters_string = f"{key}={value}" + if job_name: + started_task_name = job_name + elif member: + started_task_name = member + if identifier: + started_task_name = started_task_name + "." + identifier + else: + module.fail_json( + rc=5, + msg="member_name is missing which is mandatory.", + changed=False + ) + if not member: + module.fail_json( + rc=5, + msg="member_name is missing which is mandatory.", + changed=False + ) + if job_name and identifier: + module.fail_json( + rc=5, + msg="job_name and identifier_name are mutually exclusive while starting a started task.", + changed=False + ) + parameters_updated = "" + if parameters: + if len(parameters) == 1: + parameters_updated = "'" + parameters[0] + "'" + else: + parameters_updated = f"({','.join(parameters)})" + cmd = 'S ' + member if identifier: - cmd = cmd + "." + identifier + "," + device + "," + volume_serial + "," + parameters + cmd = cmd + "." 
+ identifier + if parameters: + cmd = cmd + "," + device + "," + volume_serial + "," + parameters_updated + elif volume_serial: + cmd = cmd + "," + device + "," + volume_serial + elif device: + cmd = cmd + "," + device if job_name: - cmd = cmd + ",jobname=" + job_name + cmd = cmd + ",JOBNAME=" + job_name if job_account: - cmd = cmd + ",jobacct=" + job_account + cmd = cmd + ",JOBACCT=" + job_account if subsystem_name: cmd = cmd + ",SUB=" + subsystem_name if reus_asid: cmd = cmd + ",REUSASID=" + reus_asid - if keyword_parameters: - cmd = cmd + "," + keyword_parameters - return cmd + if keyword_parameters_string: + cmd = cmd + "," + keyword_parameters_string + return started_task_name, cmd + + +def prepare_display_command(module, display_parms): + """Validates parameters and creates display command + + Parameters + ---------- + display_parms : dict + The started task display command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The display command in string format. + """ + identifier = display_parms.get('identifier_name') + job_name = display_parms.get('job_name') + started_task_name = "" + if job_name: + started_task_name = job_name + if identifier: + started_task_name = started_task_name + "." + identifier + else: + module.fail_json( + rc=5, + msg="job_name is missing which is mandatory.", + changed=False + ) + cmd = 'D A,' + started_task_name + return started_task_name, cmd + + +def prepare_stop_command(module, stop_parms): + """Validates parameters and creates stop command + + Parameters + ---------- + stop_parms : dict + The started task stop command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The stop command in string format. 
+ """ + identifier = stop_parms.get('identifier_name') + job_name = stop_parms.get('job_name') + asid = stop_parms.get('asid') + started_task_name = "" + if job_name: + started_task_name = job_name + if identifier: + started_task_name = started_task_name + "." + identifier + else: + module.fail_json( + rc=5, + msg="job_name is missing which is mandatory.", + changed=False + ) + cmd = 'P ' + started_task_name + if asid: + cmd = cmd + ',A=' + asid + return started_task_name, cmd + + +def prepare_modify_command(module, modify_parms): + """Validates parameters and creates modify command + + Parameters + ---------- + modify_parms : dict + The started task modify command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The modify command in string format. + """ + identifier = modify_parms.get('identifier_name') + job_name = modify_parms.get('job_name') + parameters = modify_parms.get('parameters') + started_task_name = "" + if job_name: + started_task_name = job_name + if identifier: + started_task_name = started_task_name + "." + identifier + else: + module.fail_json( + rc=5, + msg="job_name is missing which is mandatory.", + changed=False + ) + if parameters is None: + module.fail_json( + rc=5, + msg="parameters are mandatory.", + changed=False + ) + cmd = 'F ' + started_task_name + "," + ",".join(parameters) + return started_task_name, cmd + + +def prepare_cancel_command(module, cancel_parms): + """Validates parameters and creates cancel command + + Parameters + ---------- + cancel_parms : dict + The started task modify command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The cancel command in string format. 
+ """ + identifier = cancel_parms.get('identifier_name') + job_name = cancel_parms.get('job_name') + asid = cancel_parms.get('asid') + dump = cancel_parms.get('dump') + armrestart = cancel_parms.get('armrestart') + userid = cancel_parms.get('userid') + started_task_name = "" + if job_name: + started_task_name = job_name + if identifier: + started_task_name = started_task_name + "." + identifier + elif userid: + started_task_name = "U=" + userid + else: + module.fail_json( + rc=5, + msg="Both job_name and userid are missing, one of them is needed to cancel a task.", + changed=False + ) + if userid and armrestart: + module.fail_json( + rc=5, + msg="The ARMRESTART parameter is not valid with the U=userid parameter.", + changed=False + ) + cmd = 'C ' + started_task_name + if asid: + cmd = cmd + ',A=' + asid + if dump: + cmd = cmd + ',DUMP' + if armrestart: + cmd = cmd + ',ARMRESTART' + return started_task_name, cmd + + +def prepare_force_command(module, force_parms): + """Validates parameters and creates force command + + Parameters + ---------- + force_parms : dict + The started task force command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The force command in string format. 
+ """ + identifier = force_parms.get('identifier_name') + job_name = force_parms.get('job_name') + asid = force_parms.get('asid') + arm = force_parms.get('arm') + armrestart = force_parms.get('armrestart') + userid = force_parms.get('userid') + tcb_address = force_parms.get('tcb_address') + retry = force_parms.get('retry') + started_task_name = "" + if tcb_address and len(tcb_address) != 6: + module.fail_json( + rc=5, + msg="The TCB address of the task should be exactly 6-digit hexadecimal.", + changed=False + ) + if retry and not tcb_address: + module.fail_json( + rc=5, + msg="The RETRY parameter is valid with the TCB parameter only.", + changed=False + ) + if userid and armrestart: + module.fail_json( + rc=5, + msg="The ARMRESTART parameter is not valid with the U=userid parameter.", + changed=False + ) + if job_name: + started_task_name = job_name + if identifier: + started_task_name = started_task_name + "." + identifier + elif userid: + started_task_name = "U=" + userid + else: + module.fail_json( + rc=5, + msg="Both job_name and userid are missing, one of them is needed to cancel a task.", + changed=False + ) + cmd = 'FORCE ' + started_task_name + if asid: + cmd = cmd + ',A=' + asid + if arm: + cmd = cmd + ',ARM' + if armrestart: + cmd = cmd + ',ARMRESTART' + if tcb_address: + cmd = cmd + ',TCB=' + tcb_address + if retry: + cmd = cmd + ',RETRY=' + retry + return started_task_name, cmd def extract_keys(stdout): + """Extracts keys and values from the given stdout + + Parameters + ---------- + stdout : string + The started task display command output + + Returns + ------- + tasks + The list of task parameters. 
+ """ keys = {'A': 'ASID', 'CT': 'CPU_Time', 'ET': 'Elapsed_Time', 'WUID': 'WUID', 'USERID': 'USERID', 'P': 'Priority'} - # params = {} - # for key in keys: - # parm = re.search(rf"{key}=([^\s]+)", stdout) - # if parm: - # params[keys[key]] = parm.group(1) - # return params lines = stdout.strip().split('\n') tasks = [] current_task = None @@ -275,16 +797,103 @@ def extract_keys(stdout): def fetch_logs(command): + """Extracts keys and values from the given stdout + + Parameters + ---------- + command : string + The comand which need to be checked in system logs + + Returns + ------- + list + The list of logs from SYSLOG + """ stdout = zsystem.read_console(options='-t1') stdout_lines = stdout.splitlines() first = None + pattern = rf"\b{command}\b" for i, line in enumerate(stdout_lines): - if command in line: - if first is None: - first = i - if first is None: + if re.search(pattern, line, re.IGNORECASE): + first = i + if not first: return "" - return stdout_lines[first:] + logs = "\n".join(stdout_lines[first:]) + return logs + + +def parse_and_validate_args(params): + """Parse and validate input parameters + + Parameters + ---------- + params : dict + The dictionary which has input parameters. + + Returns + ------- + dict + The validated list of input parameters. 
+ """ + start_args = dict( + device_type=dict(type="str", required=False), + device_number=dict(type="str", required=False), + identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), + job_account=dict(type="str", required=False), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + keyword_parameters=dict(type="basic_dict", required=False), + member_name=dict(type="member_name", required=False, aliases=["member"]), + parameters=dict(type="list", elements="str", required=False), + reus_asid=dict(type="str", required=False), + subsystem=dict(type="str", required=False), + volume_serial=dict(type="str", required=False) + ) + display_args = dict( + identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) + ) + modify_args = dict( + identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + parameters=dict(type="list", elements="str", required=False) + ) + cancel_args = dict( + armrestart=dict(type="bool", required=False), + asid=dict(type="str", required=False), + dump=dict(type="bool", required=False), + identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + userid=dict(type="str", required=False) + ) + force_args = dict( + arm=dict(type="bool", required=False), + armrestart=dict(type="bool", required=False), + asid=dict(type="str", required=False), + identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + retry=dict(type="str", required=False), + tcb_address=dict(type="str", required=False), + userid=dict(type="str", required=False) + ) + stop_args = dict( + 
asid=dict(type="str", required=False), + identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) + ) + module_args = dict( + start_task=dict(type="dict", required=False, options=start_args), + stop_task=dict(type="dict", required=False, options=stop_args), + display_task=dict(type="dict", required=False, options=display_args), + modify_task=dict(type="dict", required=False, options=modify_args), + cancel_task=dict(type="dict", required=False, options=cancel_args), + force_task=dict(type="dict", required=False, options=force_args), + verbose=dict(type="bool", required=False), + wait_time_s=dict(type="int", default=5) + ) + parser = better_arg_parser.BetterArgParser(module_args) + parsed_args = parser.parse_args(params) + return parsed_args + def run_module(): """Initialize the module. @@ -294,217 +903,102 @@ def run_module(): fail_json z/OS started task operation failed. """ + start_args = dict( + device_type=dict(type="str", required=False), + device_number=dict(type="str", required=False), + identifier_name=dict(type="str", required=False, aliases=["identifier"]), + job_account=dict(type="str", required=False), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + keyword_parameters=dict(type="dict", required=False, no_log=False), + member_name=dict(type="str", required=False, aliases=["member"]), + parameters=dict(type="list", elements="str", required=False), + reus_asid=dict(type="str", required=False, choices=["YES", "NO"]), + subsystem=dict(type="str", required=False), + volume_serial=dict(type="str", required=False) + ) + display_args = dict( + identifier_name=dict(type="str", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) + ) + modify_args = dict( + identifier_name=dict(type="str", required=False, aliases=["identifier"]), + 
job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + parameters=dict(type="list", elements="str", required=False) + ) + cancel_args = dict( + armrestart=dict(type="bool", required=False), + asid=dict(type="str", required=False), + dump=dict(type="bool", required=False), + identifier_name=dict(type="str", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + userid=dict(type="str", required=False) + ) + force_args = dict( + arm=dict(type="bool", required=False), + armrestart=dict(type="bool", required=False), + asid=dict(type="str", required=False), + identifier_name=dict(type="str", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), + retry=dict(type="str", required=False, choices=["YES", "NO"]), + tcb_address=dict(type="str", required=False), + userid=dict(type="str", required=False) + ) + stop_args = dict( + asid=dict(type="str", required=False), + identifier_name=dict(type="str", required=False, aliases=["identifier"]), + job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) + ) + + module_args = dict( + start_task=dict(type="dict", required=False, options=start_args), + stop_task=dict(type="dict", required=False, options=stop_args), + display_task=dict(type="dict", required=False, options=display_args), + modify_task=dict(type="dict", required=False, options=modify_args), + cancel_task=dict(type="dict", required=False, options=cancel_args), + force_task=dict(type="dict", required=False, options=force_args), + verbose=dict(type="bool", required=False, default=False), + wait_time_s=dict(type="int", default=5) + ) + module = AnsibleModule( - argument_spec={ - 'state': { - 'type': 'str', - 'required': True, - 'choices': ['started', 'stopped', 'modified', 'display', 'forced', 'cancelled'] - }, - 'member_name': { - 'type': 'str', - 'required': False, - 'aliases': 
['member'] - }, - 'identifier_name': { - 'type': 'str', - 'required': False, - 'aliases': ['identifier'] - }, - 'job_name': { - 'type': 'str', - 'required': False, - 'aliases': ['job', 'task_name', 'task'] - }, - 'job_account': { - 'type': 'str', - 'required': False - }, - 'device_type': { - 'type': 'str', - 'required': False - }, - 'device_number': { # A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. - 'type': 'str', - 'required': False - }, - 'volume_serial': { - 'type': 'str', - 'required': False - }, - 'subsystem_name': { # The name must be 1 - 4 characters - 'type': 'str', - 'required': False - }, - 'reus_asid': { - 'type': 'str', - 'required': False, - 'choices': ['YES', 'NO'] - }, - 'parameters': { - 'type': 'str', - 'required': False - }, - 'keyword_parameters': { - 'type': 'dict', - 'required': False, - 'no_log': False - }, - 'asid': { - 'type': 'str', - 'required': False - }, - 'verbose': { - 'type': 'bool', - 'required': False - }, - 'wait_time_s': { - 'type': 'int', - 'required': False, - 'default': 5 - } - }, + argument_spec=module_args, mutually_exclusive=[ - ['device_number', 'device_type'] + ["start_task", "stop_task", "display_task", "modify_task", "cancel_task", "force_task"] ], supports_check_mode=True ) - args_def = { - 'state': { - 'arg_type': 'str', - 'required': True - }, - 'member_name': { - 'arg_type': 'member_name', - 'required': False, - 'aliases': ['member'] - }, - 'identifier_name': { - 'arg_type': 'identifier_name', - 'required': False, - 'aliases': ['identifier'] - }, - 'job_name': { - 'arg_type': 'str', - 'required': False, - 'aliases': ['job', 'task_name', 'task'] - }, - 'job_account': { - 'arg_type': 'str', - 'required': False - }, - 'device_type': { - 'arg_type': 'str', - 'required': False - }, - 'device_number': { - 'arg_type': 'str', - 'required': False - }, - 'volume_serial': { - 'arg_type': 'str', - 'required': False - }, - 'subsystem_name': { - 
'arg_type': 'str', - 'required': False - }, - 'reus_asid': { - 'arg_type': 'str', - 'required': False - }, - 'parameters': { - 'arg_type': 'str', - 'required': False - }, - 'keyword_parameters': { - 'arg_type': 'basic_dict', - 'required': False - }, - 'asid': { - 'arg_type': 'str', - 'required': False - }, - 'verbose': { - 'arg_type': 'bool', - 'required': False - }, - 'wait_time_s': { - 'arg_type': 'int', - 'required': False - } - } - try: - parser = better_arg_parser.BetterArgParser(args_def) - parsed_args = parser.parse_args(module.params) - module.params = parsed_args + parms = parse_and_validate_args(module.params) except ValueError as err: module.fail_json( + rc=5, msg='Parameter verification failed.', stderr=str(err) ) - operation = module.params.get('state') - member = module.params.get('member_name') - identifier = module.params.get('identifier') - job_name = module.params.get('job_name') - job_account = module.params.get('job_account') - asid = module.params.get('asid') - parameters = module.params.get('parameters') - device_type = module.params.get('device_type') - device_number = module.params.get('device_number') - volume_serial = module.params.get('volume_serial') - subsystem_name = module.params.get('subsystem_name') - reus_asid = module.params.get('reus_asid') - keyword_parameters = module.params.get('keyword_parameters') - wait_time_s = module.params.get('wait_time_s') - verbose = module.params.get('verbose') - keyword_parameters_string = None - if keyword_parameters is not None: - # keyword_parameters_string = ','.join(f"{key}={value}" for key, value in keyword_parameters.items()) - for key, value in keyword_parameters.items(): - key_len = len(key) - value_len = len(value) - if key_len > 44 or value_len > 44 or key_len + value_len > 65: - module.fail_json( - msg="The length of a keyword=option is exceeding 66 characters or length of an individual value is exceeding 44 characters. 
key:{0}, value:{1}".format(key, value),
-                changed=False
-            )
-        else:
-            keyword_parameters_string = ','.join(f"{key}={value}")
-    device = device_type if device_type is not None else device_number
+    wait_time_s = parms.get('wait_time_s')
+    verbose = parms.get('verbose')
     kwargs = {}
-    start_errmsg = ['ERROR']
+    """
+    Below error messages are used to determine if the response has any error.
+    The response is treated as failed when it contains any message explained below.
+
+    ERROR: Response contains this keyword when JCL contains syntax error.
+    INVALID PARAMETER: When invalid parameter passed in command line.
+    NOT ACTIVE: When started task with the given job name is not active
+    REJECTED: When modify command is not supported by respective started task.
+    NOT LOGGED ON: When invalid userid passed in command.
+    DUPLICATE NAME FOUND: When multiple started tasks exist with same name.
+    CANCELABLE: When force command used without using cancel command
+    """
+    start_errmsg = ['ERROR', 'INVALID PARAMETER']
     stop_errmsg = ['NOT ACTIVE']
     display_errmsg = ['NOT ACTIVE']
     modify_errmsg = ['REJECTED', 'NOT ACTIVE']
-    cancel_errmsg = ['NOT ACTIVE']
-    force_errmsg = ['NOT ACTIVE']
+    cancel_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'DUPLICATE NAME FOUND']
+    force_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'CANCELABLE', 'DUPLICATE NAME FOUND']
     err_msg = []
-    # Validations
-    if job_account and len(job_account) > 55:
-        module.fail_json(
-            msg="job_account value should not exceed 55 characters.",
-            changed=False
-        )
-    if device_number:
-        devnum_len = len(device_number)
-        if devnum_len not in (3, 5) or (devnum_len == 5 and not device_number.startswith("/")):
-            module.fail_json(
-                msg="Invalid device_number.",
-                changed=False
-            )
-    if subsystem_name and len(subsystem_name) > 4:
-        module.fail_json(
-            msg="The subsystem_name must be 1 - 4 characters.",
-            changed=False
-        )
-    # keywaord arguments validation.....
- - wait_s = 5 - use_wait_arg = False if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): use_wait_arg = True @@ -513,77 +1007,48 @@ def run_module(): kwargs.update({"wait": True}) args = [] - cmd = '' - started_task_name = "" - if operation != 'started': - if job_name is not None: - started_task_name = job_name - if identifier is not None: - started_task_name = started_task_name + "." + identifier - else: - module.fail_json( - msg="job_name is missing which is mandatory.", - changed=False - ) + cmd = "" + execute_display_before = False execute_display_after = False - if operation == 'started': - execute_display_after = True - if job_name is not None: - started_task_name = job_name - elif member is not None: - started_task_name = member - if identifier is not None: - started_task_name = started_task_name + "." + identifier - else: - module.fail_json( - msg="member_name is missing which is mandatory.", - changed=False - ) + if parms.get('start_task'): err_msg = start_errmsg - if member is None: - module.fail_json( - msg="member_name is missing which is mandatory.", - changed=False - ) - if job_name is not None and identifier is not None: - module.fail_json( - msg="job_name and identifier_name are mutually exclusive while starting a started task.", - changed=False - ) - cmd = prepare_start_command(member, identifier, job_name, job_account, device, volume_serial, subsystem_name, reus_asid, parameters, keyword_parameters_string) - elif operation == 'display': + execute_display_after = True + started_task_name, cmd = validate_and_prepare_start_command(module, parms.get('start_task')) + elif parms.get('display_task'): err_msg = display_errmsg - cmd = 'd a,' + started_task_name - elif operation == 'stopped': + started_task_name, cmd = prepare_display_command(module, parms.get('display_task')) + elif parms.get('stop_task'): execute_display_before = True err_msg = stop_errmsg - cmd = 'p ' + started_task_name - if asid: - cmd = cmd + ',a=' + asid - elif operation 
== 'cancelled': + started_task_name, cmd = prepare_stop_command(module, parms.get('stop_task')) + elif parms.get('cancel_task'): execute_display_before = True err_msg = cancel_errmsg - cmd = 'c ' + started_task_name - if asid: - cmd = cmd + ',a=' + asid - elif operation == 'forced': + started_task_name, cmd = prepare_cancel_command(module, parms.get('cancel_task')) + elif parms.get('force_task'): execute_display_before = True err_msg = force_errmsg - cmd = 'force ' + started_task_name - if asid: - cmd = cmd + ',a=' + asid - elif operation == 'modified': + started_task_name, cmd = prepare_force_command(module, parms.get('force_task')) + elif parms.get('modify_task'): execute_display_after = True err_msg = modify_errmsg - cmd = 'f ' + started_task_name + ',' + parameters + started_task_name, cmd = prepare_modify_command(module, parms.get('modify_task')) changed = False stdout = "" stderr = "" - rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, timeout_s=wait_s, *args, **kwargs) - logs = fetch_logs(cmd.upper()) # it will display both start/display logs - logs_str = "\n".join(logs) - if any(msg in out for msg in err_msg) or any(msg in logs_str for msg in err_msg) or err != "": + rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, timeout_s=wait_time_s, *args, **kwargs) + isFailed = False + system_logs = "" + if err != "" or any(msg in out for msg in err_msg): + isFailed = True + if not isFailed or verbose: + system_logs = fetch_logs(cmd.upper()) + if any(msg in system_logs for msg in err_msg): + isFailed = True + if isFailed: + if rc == 0: + rc = 1 changed = False stdout = out stderr = err @@ -594,38 +1059,33 @@ def run_module(): changed = True stdout = out stderr = err - if operation == 'display': + if parms.get('display_task'): task_params = extract_keys(out) result = dict() if module.check_mode: module.exit_json(**result) + state = "" - if 
verbose: - result = dict( - changed=changed, - cmd=cmd, - task=task_params, - rc=rc, - verbose_output=logs_str, - stdout=stdout, - stderr=stderr, - stdout_lines=stdout.split('\n'), - stderr_lines=stderr.split('\n'), - ) - else: - result = dict( + result = dict( changed=changed, cmd=cmd, - task=task_params, + tasks=task_params, rc=rc, stdout=stdout, stderr=stderr, stdout_lines=stdout.split('\n'), stderr_lines=stderr.split('\n'), ) - + if verbose: + result["verbose_output"] = system_logs + if parms.get('display_task') or parms.get('modify_task'): + if len(task_params) > 0 and not isFailed: + result["state"] = "Active" + else: + result["state"] = "NotActive" + module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index f0f1785bf4..d678ed6ee7 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -39,11 +39,23 @@ /* //PEND""" +# Input arguments validation def test_start_task_with_invalid_member(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLETASK" + start_task={ + "member_name": "SAMTASK" + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLETASK" + } ) for result in start_results.contacted.values(): print(result) @@ -51,12 +63,28 @@ def test_start_task_with_invalid_member(ansible_zos_module): assert result.get("failed") is True assert result.get("stderr") is not None +def test_start_task_with_jobname_identifier(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "job_name": "SAMTASK", + "identifier": "TESTER" + } + ) + for result in 
start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + def test_start_task_with_invalid_identifier(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE", - identifier="$HELLO" + start_task={ + "member_name": "SAMPLE", + "identifier": "$HELLO" + } ) for result in start_results.contacted.values(): @@ -64,14 +92,27 @@ def test_start_task_with_invalid_identifier(ansible_zos_module): assert result.get("changed") is False assert result.get("failed") is True assert result.get("stderr") is not None + + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "identifier": "HELLO" + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE.HELLO" def test_start_task_with_invalid_jobaccount(ansible_zos_module): hosts = ansible_zos_module job_account = "(T043JM,JM00,1,0,0,This is the invalid job account information to test negative scenario)" start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE", - job_account=job_account + start_task={ + "member_name": "SAMPLE", + "job_account": job_account + } ) for result in start_results.contacted.values(): @@ -83,22 +124,382 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): def test_start_task_with_invalid_devicenum(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE", - device_number="0870" + start_task={ + "member_name": "SAMPLE", + "device_number": "0870" + } + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +def 
test_start_task_with_invalid_volumeserial(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "volume_serial": "12345A" + } + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE,,12345A" + +def test_start_task_with_invalid_parameters(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "parameters": ["KEY1"] + } + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE,,,'KEY1'" + + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "parameters": ["KEY1", "KEY2", "KEY3"], + "volume_serial": "12345" + } + ) + + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE,,12345,(KEY1,KEY2,KEY3)" + +def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "device_number": "/0870", + "device_type": "TEST" + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + +def test_start_task_with_invalid_subsystem_negative(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member": "VLF", + "subsystem": "MSTRS" + } ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert 
result.get("failed") is True + assert result.get("msg") is not None + +def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member": "VLF", + "keyword_parameters":{ + "key1key1key1key1key1key1key1key1": "value1value1value1value1value1value1" + } + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + start_results = hosts.all.zos_started_task( + start_task={ + "member": "VLF", + "keyword_parameters":{ + "key1key1key1key1key1key1key1key1key1key1key1key1": "value1" + } + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + start_results = hosts.all.zos_started_task( + start_task={ + "member": "VLF", + "keyword_parameters":{ + "KEY1": "VALUE1", + "KEY2": "VALUE2" + } + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == 'S VLF,KEY1=VALUE1,KEY2=VALUE2' + + +def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "device_number": "/ABCD" + } + ) for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == 'S SAMPLE,/ABCD' + +def test_display_task_negative(ansible_zos_module): + hosts = ansible_zos_module + display_results = hosts.all.zos_started_task( + display_task={ + "identifier": "SAMPLE" + } + ) + for result in display_results.contacted.values(): + print(result) + assert result.get("changed") is 
False + assert result.get("failed") is True + assert result.get("msg") is not None + +def test_start_and_display_task_negative(ansible_zos_module): + hosts = ansible_zos_module + display_results = hosts.all.zos_started_task( + start_task={ + "member": "SAMPLE" + }, + display_task={ + "job": "SAMPLE" + } + ) + for result in display_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +def test_stop_task_negative(ansible_zos_module): + hosts = ansible_zos_module + stop_results = hosts.all.zos_started_task( + stop_task={ + "identifier": "SAMPLE" + } + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + stop_results = hosts.all.zos_started_task( + stop_task={ + "job_name": "TESTER", + "identifier": "SAMPLE" + } + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "P TESTER.SAMPLE" + +def test_modify_task_negative(ansible_zos_module): + hosts = ansible_zos_module + modify_results = hosts.all.zos_started_task( + modify_task={ + "identifier": "SAMPLE" + } + ) + for result in modify_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + modify_results = hosts.all.zos_started_task( + modify_task={ + "job_name": "TESTER" + } + ) + for result in modify_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + modify_results = hosts.all.zos_started_task( + modify_task={ + "job_name": "TESTER", + "identifier": "SAMPLE", + "parameters": ["REPLACE", "VX=10"] + } + ) + for result in 
modify_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "F TESTER.SAMPLE,REPLACE,VX=10" + +def test_cancel_task_negative(ansible_zos_module): + hosts = ansible_zos_module + cancel_results = hosts.all.zos_started_task( + cancel_task={ + "identifier": "SAMPLE" + } + ) + for result in cancel_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + cancel_results = hosts.all.zos_started_task( + cancel_task={ + "job_name": "TESTER", + "identifier": "SAMPLE" + } + ) + for result in cancel_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "C TESTER.SAMPLE" + cancel_results = hosts.all.zos_started_task( + cancel_task={ + "userid": "OMVSTEST", + "asid": "0012", + "dump": True + }, + verbose=True + ) + for result in cancel_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "C U=OMVSTEST,A=0012,DUMP" + cancel_results = hosts.all.zos_started_task( + cancel_task={ + "userid": "OMVSADM", + "armrestart": True + } + ) + for result in cancel_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +def test_force_task_negative(ansible_zos_module): + hosts = ansible_zos_module + force_results = hosts.all.zos_started_task( + force_task={ + "identifier": "SAMPLE" + } + ) + for result in force_results.contacted.values(): print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None + force_results = hosts.all.zos_started_task( + force_task={ + "job_name": "TESTER", + "identifier": "SAMPLE" + } + ) + 
for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "FORCE TESTER.SAMPLE" + force_results = hosts.all.zos_started_task( + force_task={ + "userid": "OMVSADM", + "armrestart": True + } + ) + for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + force_results = hosts.all.zos_started_task( + force_task={ + "job_name": "TESTER", + "retry": "YES" + } + ) + for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + force_results = hosts.all.zos_started_task( + force_task={ + "job_name": "TESTER", + "tcb_address": "0006789", + "retry": "YES" + } + ) + for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + force_results = hosts.all.zos_started_task( + force_task={ + "job_name": "TESTER", + "identifier": "SAMPLE", + "tcb_address": "000678", + "retry": "YES" + } + ) + for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "FORCE TESTER.SAMPLE,TCB=000678,RETRY=YES" + force_results = hosts.all.zos_started_task( + force_task={ + "userid": "OMVSTEST", + "tcb_address": "000678", + "retry": "YES" + }, + verbose=True + ) + for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "FORCE U=OMVSTEST,TCB=000678,RETRY=YES" + + def test_start_and_cancel_zos_started_task(ansible_zos_module): try: hosts = ansible_zos_module data_set_name = get_tmp_ds_name() 
- print(data_set_name) temp_path = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( @@ -114,8 +515,10 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE" + start_task={ + "member_name": "SAMPLE" + }, + verbose=True ) for result in start_results.contacted.values(): @@ -123,10 +526,25 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is not None + + force_results = hosts.all.zos_started_task( + force_task={ + "task_name": "SAMPLE" + } + ) + for result in force_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "FORCE SAMPLE" + assert "CANCELABLE - ISSUE CANCEL BEFORE FORCE" in result.get("stderr") stop_results = hosts.all.zos_started_task( - state="cancelled", - task_name="SAMPLE" + cancel_task={ + "task_name": "SAMPLE" + } ) for result in stop_results.contacted.values(): @@ -134,12 +552,58 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None + + # validate identifier + start_results = hosts.all.zos_started_task( + start_task={ + "member_name": "SAMPLE", + "identifier": "TESTER", + "reus_asid": "YES" + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None + assert result.get("cmd") == "S SAMPLE.TESTER,REUSASID=YES" + + 
stop_results = hosts.all.zos_started_task( + cancel_task={ + "task_name": "SAMPLE" + } + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is False + assert result.get("stderr") is not None + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None + + stop_results = hosts.all.zos_started_task( + cancel_task={ + "task_name": "SAMPLE", + "identifier": "TESTER" + } + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None job_account = "(T043JM,JM00,1,0,0,)" start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE", - job_account=job_account + start_task={ + "member": "SAMPLE", + "job_account": job_account + } ) for result in start_results.contacted.values(): @@ -147,23 +611,31 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None display_result = hosts.all.zos_started_task( - state="display", - task_name="SAMPLE" + display_task={ + "task_name": "SAMPLE" + } ) for result in display_result.contacted.values(): print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None + display_output = list(display_result.contacted.values())[0].get("stdout") asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) stop_results = hosts.all.zos_started_task( - state="cancelled", - task_name="SAMPLE", - asid=asid_val + cancel_task={ + "task_name": "SAMPLE", + "asid": asid_val + }, + verbose=True ) for result in stop_results.contacted.values(): @@ -171,6 
+643,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is not None finally: hosts.all.file(path=temp_path, state="absent") @@ -201,9 +675,10 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): ) start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE", - job_name="TESTTSK" + start_task={ + "member": "SAMPLE", + "job_name": "TESTTSK" + } ) for result in start_results.contacted.values(): @@ -213,8 +688,9 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( - state="cancelled", - task_name="TESTTSK" + cancel_task={ + "task_name": "TESTTSK" + } ) for result in stop_results.contacted.values(): @@ -234,24 +710,55 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): def test_stop_and_modify_with_vlf_task(ansible_zos_module): hosts = ansible_zos_module - - stop_results = hosts.all.zos_started_task( - state="stopped", - task_name="vlf" + modify_results = hosts.all.zos_started_task( + modify_task={ + "task_name": "VLF", + "parameters": ["REPLACE" ,"NN=00"] + } + ) + for result in modify_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "F VLF,REPLACE,NN=00" + + display_result = hosts.all.zos_started_task( + display_task={ + "task_name": "VLF" + } ) + for result in display_result.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None + + display_output = list(display_result.contacted.values())[0].get("stdout") 
+ asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + stop_results = hosts.all.zos_started_task( + stop_task={ + "task_name": "VLF", + "asid": asid_val + } + ) for result in stop_results.contacted.values(): print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert result.get("cmd") == f"P VLF,A={asid_val}" start_results = hosts.all.zos_started_task( - state="started", - member="vlf", - subsystem_name="mstr" + start_task={ + "member": "VLF", + "identifier": "TESTER", + "subsystem": "MSTR" + } ) - for result in start_results.contacted.values(): print(result) assert result.get("changed") is True @@ -259,17 +766,43 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): assert result.get("stderr") == "" modify_results = hosts.all.zos_started_task( - state="modified", - task_name="vlf", - parameters="replace,nn=00" + modify_task={ + "task_name": "VLF", + "identifier": "TESTER", + "parameters": ["REPLACE" ,"NN=00"] + } ) - for result in modify_results.contacted.values(): print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert result.get("cmd") == "F VLF.TESTER,REPLACE,NN=00" + stop_results = hosts.all.zos_started_task( + stop_task={ + "task_name": "VLF", + "identifier": "TESTER" + } + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + start_results = hosts.all.zos_started_task( + start_task={ + "member": "VLF", + "subsystem": "MSTR" + } + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): try: @@ -291,9 +824,10 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): ) 
start_results = hosts.all.zos_started_task( - state="started", - member="SAMPLE2", - job_name="SPROC", + start_task={ + "member": "SAMPLE2", + "job_name": "SPROC" + }, verbose=True ) @@ -304,8 +838,9 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( - state="cancelled", - task_name="SPROC" + cancel_task={ + "task_name": "SPROC" + } ) for result in stop_results.contacted.values(): From 4667f8741410588e2b38dcb3209544462a38f5bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:01:21 -0500 Subject: [PATCH 34/73] [Enhancement][2160]update_zos_unarchive_interface (#2271) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update interface * False debug * Add names to action plugin * Modify fragment --------- Co-authored-by: André Marcel Gutiérrez Benítez --- .../2256-zos_archive-interface-update.yml | 2 +- .../2271-update_zos_unarchive_interface.yml | 6 + plugins/action/zos_unarchive.py | 12 +- plugins/modules/zos_unarchive.py | 65 ++++----- .../modules/test_zos_unarchive_func.py | 127 ++++++++++++------ 5 files changed, 131 insertions(+), 81 deletions(-) create mode 100644 changelogs/fragments/2271-update_zos_unarchive_interface.yml diff --git a/changelogs/fragments/2256-zos_archive-interface-update.yml b/changelogs/fragments/2256-zos_archive-interface-update.yml index 7a0d576d6d..bd8c76bfe3 100644 --- a/changelogs/fragments/2256-zos_archive-interface-update.yml +++ b/changelogs/fragments/2256-zos_archive-interface-update.yml @@ -1,5 +1,5 @@ breaking_changes: - - zos_lineinfile - Option ``format.format_options`` is deprecated in favor of ``format.options``. + - zos_archive - Option ``format.format_options`` is deprecated in favor of ``format.options``. 
Option ``format.format_options.name`` is deprecated in favor of ``format.options.type``. Option ``format.format_options.use_adrdssu`` is deprecated in favor of ``format.options.use_adrdssu``. Option ``format.format_options.terse_pack`` is deprecated in favor of ``format.options.spack`` as a new boolean option. diff --git a/changelogs/fragments/2271-update_zos_unarchive_interface.yml b/changelogs/fragments/2271-update_zos_unarchive_interface.yml new file mode 100644 index 0000000000..10c50e8a5e --- /dev/null +++ b/changelogs/fragments/2271-update_zos_unarchive_interface.yml @@ -0,0 +1,6 @@ +breaking_changes: + - zos_unarchive - Option ``format.format_options`` is deprecated in favor of ``format.options``. + Option ``format.format_options.name`` is deprecated in favor of ``format.options.type``. + Option ``format.format_options.use_adrdssu`` is deprecated in favor of ``format.options.use_adrdssu``. + Return values ``stdout_lines`` and ``stderr_lines`` is added. + (https://github.com/ansible-collections/ibm_zos_core/pull/2271). 
\ No newline at end of file diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index a6670ea7b3..ef22800107 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -63,7 +63,7 @@ def run(self, tmp=None, task_vars=None): source = module_args.get("src") force = _process_boolean(module_args.get("force")) format = self._task.args.get("format") - format_name = format.get("name") + format_type = format.get("type") copy_module_args = dict() dest_data_set = format.get("dest_data_set") dest = "" @@ -71,12 +71,12 @@ def run(self, tmp=None, task_vars=None): source = os.path.expanduser(source) source = os.path.realpath(source) - if format_name in USS_SUPPORTED_FORMATS: + if format_type in USS_SUPPORTED_FORMATS: tmp_files = dest = self._execute_module( module_name="tempfile", module_args={}, task_vars=task_vars, ).get("path") - uss_format = format_name - elif format_name in MVS_SUPPORTED_FORMATS: + uss_format = format_type + elif format_type in MVS_SUPPORTED_FORMATS: if dest_data_set is None: dest_data_set = dict() tmp_hlq = module_args.get("tmp_hlq") if module_args.get("tmp_hlq") is not None else "" @@ -90,9 +90,9 @@ def run(self, tmp=None, task_vars=None): dest = cmd_res.get("stdout") if dest_data_set.get("space_primary") is None: dest_data_set.update(space_primary=5, space_type="m") - if format_name == 'terse': + if format_type == 'terse': dest_data_set.update(type='seq', record_format='fb', record_length=1024) - if format_name == 'xmit': + if format_type == 'xmit': dest_data_set.update(type='seq', record_format='fb', record_length=80) copy_module_args.update( diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 970d789a6b..b8dc1ac2a8 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -46,7 +46,7 @@ type: dict required: true suboptions: - name: + type: description: - The compression format used while archiving. 
type: str @@ -59,7 +59,7 @@ - terse - xmit - pax - format_options: + options: description: - Options specific to a compression format. type: dict @@ -76,7 +76,7 @@ - When providing the I(xmit_log_data_set) name, ensure there is adequate space. type: str - use_adrdssu: + adrdssu: description: - If set to true, the C(zos_unarchive) module will use Data Facility Storage Management Subsystem data set services @@ -86,7 +86,7 @@ default: False dest_volumes: description: - - When I(use_adrdssu=True), specify the volume the data sets + - When I(adrdssu=True), specify the volume the data sets will be written to. - If no volume is specified, storage management rules will be used to determine the volume where the file will be @@ -378,14 +378,14 @@ zos_unarchive: src: "./files/archive_folder_test.tar" format: - name: tar + type: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: src: "/tmp/test.bz2" format: - name: bz2 + type: bz2 include: - 'foo.txt' @@ -394,7 +394,7 @@ zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse exclude: - USER.ARCHIVE.TEST1 - USER.ARCHIVE.TEST2 @@ -404,16 +404,16 @@ zos_unarchive: src: "USER.ARCHIVE(0)" format: - name: terse + type: terse # List option - name: List content from XMIT zos_unarchive: src: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit - format_options: - use_adrdssu: true + type: xmit + options: + adrdssu: true list: true # Encoding example @@ -421,7 +421,7 @@ zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -430,7 +430,7 @@ zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -522,7 +522,7 @@ def __init__(self, module): Destination of the unarchive. format : str Name of the format of the module. - format_options : list[str] + options : list[str] Options of the format of the module. 
tmphql : str High level qualifier for temporary datasets. @@ -552,8 +552,8 @@ def __init__(self, module): self.module = module self.src = module.params.get("src") self.dest = module.params.get("dest") - self.format = module.params.get("format").get("name") - self.format_options = module.params.get("format").get("format_options") + self.format = module.params.get("format").get("type") + self.options = module.params.get("format").get("options") self.tmphlq = module.params.get("tmp_hlq") self.force = module.params.get("force") self.targets = list() @@ -888,7 +888,7 @@ def __init__(self, module): ---------- volumes : list[str] List of destination volumes. - use_adrdssu : bool + adrdssu : bool Whether to use Data Facility Storage Management Subsystem data set services program ADRDSSU to uncompress data sets or not. dest_dat_set : dict @@ -897,8 +897,8 @@ def __init__(self, module): Source size. """ super(MVSUnarchive, self).__init__(module) - self.volumes = self.format_options.get("dest_volumes") - self.use_adrdssu = self.format_options.get("use_adrdssu") + self.volumes = self.options.get("dest_volumes") + self.adrdssu = self.options.get("adrdssu") self.dest_data_set = module.params.get("dest_data_set") self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set self.source_size = 0 @@ -1115,9 +1115,10 @@ def _restore(self, source): self.clean_environment(data_sets=[source], uss_files=[], remove_targets=True) self.module.fail_json( msg="Failed executing ADRDSSU to unarchive {0}. 
List of data sets not restored : {1}".format(source, unrestored_data_sets), - stdout=out, + stdout=f"command: {restore_cmd} \n stdout:{out}", stderr=err, - stdout_lines=restore_cmd, + stdout_lines=f"command: {restore_cmd} \n stdout:{out}".splitlines(), + stderr_lines=err.splitlines(), rc=rc, ) return rc @@ -1176,7 +1177,7 @@ def extract_src(self): """ temp_ds = "" - if not self.use_adrdssu: + if not self.adrdssu: temp_ds, rc = self._create_dest_data_set(**self.dest_data_set) rc = self.unpack(self.src, temp_ds) self.targets = [temp_ds] @@ -1322,6 +1323,8 @@ def unpack(self, src, dest): msg="Failed executing AMATERSE to restore {0} into {1}".format(src, dest), stdout=out, stderr=err, + stdout_lines=out.splitlines(), + stderr_lines=err.splitlines(), rc=rc, ) return rc @@ -1369,6 +1372,8 @@ def unpack(self, src, dest): msg="Failed executing RECEIVE to restore {0} into {1}".format(src, dest), stdout=out, stderr=err, + stdout_lines=out.splitlines(), + stderr_lines=err.splitlines(), rc=rc, ) return rc @@ -1391,7 +1396,7 @@ def get_unarchive_handler(module): ZipUnarchive The appropriate object type for any other format. 
""" - format = module.params.get("format").get("name") + format = module.params.get("format").get("type") if format in ["tar", "gz", "bz2", "pax"]: return TarUnarchive(module) elif format == "terse": @@ -1594,12 +1599,12 @@ def run_module(): type='dict', required=True, options=dict( - name=dict( + type=dict( type='str', required=True, choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( @@ -1611,7 +1616,7 @@ def run_module(): type='list', elements='str', ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, ) @@ -1698,13 +1703,13 @@ def run_module(): type='dict', required=True, options=dict( - name=dict( + type=dict( type='str', required=True, default='gz', choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( @@ -1716,7 +1721,7 @@ def run_module(): type='list', elements='str' ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, ), @@ -1724,7 +1729,7 @@ def run_module(): default=dict(xmit_log_data_set=""), ) ), - default=dict(name="", format_options=dict(xmit_log_data_set="")), + default=dict(type="", options=dict(xmit_log_data_set="")), ), dest_data_set=dict( arg_type='dict', diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index c9ec479091..910e08eb56 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -62,7 +62,7 @@ ibm.ibm_zos_core.zos_unarchive: src: {3} format: - name: {4} + type: {4} remote_src: True async: 45 poll: 0 @@ -156,7 +156,7 @@ def test_uss_unarchive(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, ) @@ -165,6 +165,8 @@ def test_uss_unarchive(ansible_zos_module, ds_format): for result in 
unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -197,7 +199,7 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, include=include_list, remote_src=True, @@ -206,6 +208,8 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -241,7 +245,7 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, exclude=exclude_list, remote_src=True, @@ -249,6 +253,9 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -282,7 +289,7 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, ) @@ -290,6 +297,8 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): for result in 
unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None for file in USS_TEST_FILES.keys(): assert file[len(USS_TEMP_DIR)+1:] in result.get("targets") finally: @@ -318,7 +327,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, mode=dest_mode, @@ -326,6 +335,8 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None dest_files = list(USS_TEST_FILES.keys()) for file in dest_files: stat_dest_res = hosts.all.stat(path=file) @@ -360,7 +371,7 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): src=tar_file, dest=USS_TEMP_DIR, format={ - "name":"tar" + "type":"tar" }, force=True, ) @@ -368,6 +379,8 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}/{tmp_file.name}") for c_result in cmd_result.contacted.values(): @@ -404,7 +417,7 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, encoding= encoding, @@ -414,6 +427,8 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert 
result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -453,7 +468,7 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, encoding= encoding, @@ -463,6 +478,8 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -581,7 +598,7 @@ def test_mvs_unarchive_single_data_set( hosts.all.shell(cmd=f"drm '{dataset}'") unarchive_format_dict = { - "name": ds_format, + "type": ds_format, } # Unarchive action unarchive_result = hosts.all.zos_unarchive( @@ -599,6 +616,8 @@ def test_mvs_unarchive_single_data_set( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -676,11 +695,11 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( format_dict = { "type":ds_format } - format_dict["format_options"] = { + format_dict["options"] = { "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(spack=True) + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, 
dest=mvs_dest_archive, @@ -698,9 +717,9 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( hosts.all.shell(cmd=f"drm '{dataset}'") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -714,6 +733,8 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -786,9 +807,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form hosts.all.shell(cmd=f"drm {dataset}*") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -803,6 +824,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): @@ -883,9 +906,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( hosts.all.shell(cmd=f"drm {dataset}*") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -902,6 +925,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert 
result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlqua}.*" """) for c_result in cmd_result.contacted.values(): @@ -983,9 +1008,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( hosts.all.shell(cmd=f""" drm "{dataset}*" """) unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -1001,6 +1026,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f""" dls "{hlqua}.*" """) for c_result in cmd_result.contacted.values(): @@ -1078,9 +1105,9 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat hosts.all.shell(cmd=f"drm {dataset}*") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -1095,6 +1122,8 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat assert result.get("changed") is False assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): @@ -1180,9 +1209,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( ) unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -1198,6 +1227,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( assert result.get("changed") is True assert 
result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlqua}.*" """) for c_result in cmd_result.contacted.values(): @@ -1306,9 +1337,9 @@ def test_mvs_unarchive_single_data_set_remote_src( source_path = res.get("dest") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True + "type": ds_format, + "options": { + "adrdssu": True } } # Unarchive action @@ -1321,6 +1352,8 @@ def test_mvs_unarchive_single_data_set_remote_src( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -1420,7 +1453,7 @@ def test_mvs_unarchive_encoding( hosts.all.shell(cmd=f" drm '{dataset}' ") unarchive_format_dict = { - "name": ds_format, + "type": ds_format, } # Unarchive action unarchive_result = hosts.all.zos_unarchive( @@ -1439,6 +1472,8 @@ def test_mvs_unarchive_encoding( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -1533,7 +1568,7 @@ def test_mvs_unarchive_encoding_skip_encoding( hosts.all.shell(cmd=f" drm '{dataset}' ") unarchive_format_dict = { - "name": ds_format, + "type": ds_format, } # skipping some files to encode @@ -1558,6 +1593,8 @@ def test_mvs_unarchive_encoding_skip_encoding( for result in unarchive_result.contacted.values(): 
assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -1636,9 +1673,9 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module hosts.all.shell(cmd=f"drm {dataset}*") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True, + "type": ds_format, + "options": { + "adrdssu": True, } } # Unarchive action @@ -1654,6 +1691,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): @@ -1673,10 +1712,10 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): source_path = "/tmp/OMVSADM.NULL" unarchive_format_dict = { - "name":'terse' + "type":'terse' } - unarchive_format_dict["format_options"] = { - "use_adrdssu":True + unarchive_format_dict["options"] = { + "adrdssu":True } # Unarchive action @@ -1755,9 +1794,9 @@ def test_gdg_unarchive(ansible_zos_module, dstype, ds_format): assert archive_data_set in c_result.get("stdout") hosts.all.shell(cmd=f"drm '{data_set_name}(-1)' && drm '{data_set_name}(0)'") unarchive_format_dict = { - "name": ds_format, - "format_options": { - "use_adrdssu": True + "type": ds_format, + "options": { + "adrdssu": True } } unarchive_result = hosts.all.zos_unarchive( From 9c244a1f9ddcce5c7e3d3e68cd3dbbdc288b9851 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 10 Sep 2025 21:44:50 +0530 Subject: 
[PATCH 35/73] Adding generic parameters support --- plugins/modules/zos_started_task.py | 973 +++++++++--------- .../modules/test_zos_started_task_func.py | 506 ++++----- 2 files changed, 717 insertions(+), 762 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 9b8ebd9d2e..8ef0f4a920 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -27,274 +27,134 @@ - start, display, modify, cancel, force and stop a started task options: - start_task: + arm: description: - - The start operation of the started task. + - I(arm) indicates to execute normal task termination routines without causing address space destruction. required: false - type: dict - suboptions: - device_type: - description: - - I(device_type) is the type of the output device (if any) associated with the task. - required: false - type: str - device_number: - description: - - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. - A slash (/) must precede a 4-digit number but is not before a 3-digit number. - required: false - type: str - identifier_name: - description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. - required: false - type: str - aliases: - - identifier - job_account: - description: - - I(job_account) specifies accounting data in the JCL JOB statement for the started task. - If the source JCL was a job and has already accounting data, the value that is specified on this parameter - overrides the accounting data in the source JCL. - required: false - type: str - job_name: - description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. 
- required: false - type: str - aliases: - - job - - task - - task_name - keyword_parameters: - description: - - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. - The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than - 44 characters in length. - required: false - type: dict - member_name: - description: - - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL - for the task to be started. The member can be either a job or a cataloged procedure. - required: false - type: str - aliases: - - member - parameters: - description: - - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks - required: false - type: list - elements: str - reus_asid: - description: - - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, - a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified - on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. - required: false - type: str - choices: - - 'YES' - - 'NO' - subsystem: - description: - - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, - which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - required: false - type: str - volume_serial: - description: - - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. - required: false - type: str - display_task: + type: bool + armrestart: description: - - The display operation of the started task. + - I(armrestart) indicates to restart a started task automatically after the cancel completes. 
required: false - type: dict - suboptions: - identifier_name: - description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. - required: false - type: str - aliases: - - identifier - job_name: - description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. - required: false - type: str - aliases: - - job - - task - - task_name - modify_task: + type: bool + asid: description: - - The modify operation of the started task. + - I(asid) is a unique address space identifier which gets assigned to each running started task. required: false - type: dict - suboptions: - identifier_name: - description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. - required: false - type: str - aliases: - - identifier - job_name: - description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. - required: false - type: str - aliases: - - job - - task - - task_name - parameters: - description: - - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks - required: false - type: list - elements: str - cancel_task: + type: str + device_type: description: - - The cancel operation of the started task. + - I(device_type) is the type of the output device (if any) associated with the task. required: false - type: dict - suboptions: - armrestart: - description: - - I(armrestart) indicates to restart a started task automatically after the cancel completes. 
- required: false - type: bool - asid: - description: - - I(asid) is a unique address space identifier which gets assigned to each running started task. - required: false - type: str - dump: - description: - - I(dump) indicates to take dump before ending a started task. - required: false - type: bool - identifier_name: - description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. - required: false - type: str - aliases: - - identifier - job_name: - description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. - required: false - type: str - aliases: - - job - - task - - task_name - userid: - description: - - I(userid) is the user ID of the time-sharing user you want to cancel. - required: false - type: str - force_task: + type: str + device_number: description: - - The force operation of the started task. + - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. + A slash (/) must precede a 4-digit number but is not before a 3-digit number. required: false - type: dict - suboptions: - arm: - description: - - I(arm) indicates to execute normal task termination routines without causing address space destruction. - required: false - type: bool - armrestart: - description: - - I(armrestart) indicates to restart a started task automatically after the cancel completes. - required: false - type: bool - asid: - description: - - I(asid) is a unique address space identifier which gets assigned to each running started task. - required: false - type: str - identifier_name: - description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. 
- required: false - type: str - aliases: - - identifier - job_name: - description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. - required: false - type: str - aliases: - - job - - task - - task_name - retry: - description: - - I(retry) is applicable for only FORCE TCB. - required: false - type: str - choices: - - 'YES' - - 'NO' - tcb_address: - description: - - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. - required: false - type: str - userid: - description: - - I(userid) is the user ID of the time-sharing user you want to cancel. - required: false - type: str - stop_task: + type: str + dump: + description: + - I(dump) indicates to take dump before ending a started task. + required: false + type: bool + identifier_name: description: - - The stop operation of the started task. + - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. + The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_account: + description: + - I(job_account) specifies accounting data in the JCL JOB statement for the started task. + If the source JCL was a job and has already accounting data, the value that is specified on this parameter + overrides the accounting data in the source JCL. + required: false + type: str + job_name: + description: + - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, + then member_name is used as job_name. + required: false + type: str + aliases: + - job + - task + - task_name + keyword_parameters: + description: + - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. + The maximum length of each keyword=option is 66 characters. 
No individual value within this field can be longer than + 44 characters in length. required: false type: dict - suboptions: - asid: - description: - - I(asid) is a unique address space identifier which gets assigned to each running started task. - required: false - type: str - identifier_name: - description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. - required: false - type: str - aliases: - - identifier - job_name: - description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. - required: false - type: str - aliases: - - job - - task - - task_name + member_name: + description: + - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL + for the task to be started. The member can be either a job or a cataloged procedure. + required: false + type: str + aliases: + - member + parameters: + description: + - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + required: false + type: list + elements: str + retry: + description: + - I(retry) is applicable for only FORCE TCB. + required: false + type: str + choices: + - 'YES' + - 'NO' + reus_asid: + description: + - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, + a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified + on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + required: false + type: str + choices: + - 'YES' + - 'NO' + state: + description: + - The final state desired for specified started task. 
+ required: True + type: str + choices: + - started + - displayed + - modified + - cancelled + - stopped + - forced + subsystem: + description: + - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, + which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + required: false + type: str + tcb_address: + description: + - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. + required: false + type: str + volume_serial: + description: + - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. + required: false + type: str + userid: + description: + - I(userid) is the user ID of the time-sharing user you want to cancel. + required: false + type: str verbose: description: - Return System logs that describe the task's execution. @@ -306,7 +166,7 @@ default: 0 type: int description: - - Option I(wait_time_s) is the the maximum amount of time, in centiseconds (0.01s), to wait for a response after submitting + - Option I(wait_time_s) is the the maximum amount of time, in seconds, to wait for a response after submitting the console command. Default value of 0 means to wait the default amount of time supported by the opercmd utility. """ EXAMPLES = r""" @@ -317,12 +177,76 @@ """ RETURN = r""" +changed: + description: + True if the state was changed, otherwise False. + returned: always + type: bool +cmd: + description: Command executed via opercmd. + returned: changed + type: str + sample: S SAMPLE +msg: + description: Failure or skip message returned by the module. + returned: failure or skipped + type: str + sample: + File /u/user/file.txt is already missing on the system, skipping script +rc: + description: + - The return code is 0 when command executed successfully. + - The return code is 1 when opercmd throws any error. + - The return code is 5 when any parameter validation failed. 
+ returned: changed + type: int + sample: 0 +state: + description: The final state of the started task, after execution.. + returned: changed + type: str + sample: S SAMPLE +tasks: + description: + The output information for a list of started tasks matching specified criteria. + If no started task is found then this will return empty. + returned: success + type: list + elements: dict + contains: + job_name: + description: + The name of the batch job. + type: str + sample: LINKJOB +stdout: + description: The STDOUT from the command, may be empty. + returned: changed + type: str + sample: ISF031I CONSOLE OMVS0000 ACTIVATED. +stderr: + description: The STDERR from the command, may be empty. + returned: changed + type: str + sample: An error has ocurred. +stdout_lines: + description: List of strings containing individual lines from STDOUT. + returned: changed + type: list + sample: ["Allocation to SYSEXEC completed."] +stderr_lines: + description: List of strings containing individual lines from STDERR. + returned: changed + type: list + sample: ["An error has ocurred"] """ from ansible.module_utils.basic import AnsibleModule import traceback import re +from datetime import datetime, timedelta +import re from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser ) @@ -403,7 +327,7 @@ def execute_display_command(started_task_name, timeout_s): return task_params -def validate_and_prepare_start_command(module, start_parms): +def validate_and_prepare_start_command(module): """Validates parameters and creates start command Parameters @@ -418,17 +342,17 @@ def validate_and_prepare_start_command(module, start_parms): cmd The start command in string format. 
""" - member = start_parms.get('member_name') - identifier = start_parms.get('identifier_name') - job_name = start_parms.get('job_name') - job_account = start_parms.get('job_account') - parameters = start_parms.get('parameters') or [] - device_type = start_parms.get('device_type') or "" - device_number = start_parms.get('device_number') or "" - volume_serial = start_parms.get('volume_serial') or "" - subsystem_name = start_parms.get('subsystem') - reus_asid = start_parms.get('reus_asid') - keyword_parameters = start_parms.get('keyword_parameters') + member = module.params.get('member_name') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + job_account = module.params.get('job_account') + parameters = module.params.get('parameters') or [] + device_type = module.params.get('device_type') or "" + device_number = module.params.get('device_number') or "" + volume_serial = module.params.get('volume_serial') or "" + subsystem_name = module.params.get('subsystem') + reus_asid = module.params.get('reus_asid') + keyword_parameters = module.params.get('keyword_parameters') keyword_parameters_string = "" device = device_type if device_type else device_number # Validations @@ -527,7 +451,7 @@ def validate_and_prepare_start_command(module, start_parms): return started_task_name, cmd -def prepare_display_command(module, display_parms): +def prepare_display_command(module): """Validates parameters and creates display command Parameters @@ -542,8 +466,8 @@ def prepare_display_command(module, display_parms): cmd The display command in string format. 
""" - identifier = display_parms.get('identifier_name') - job_name = display_parms.get('job_name') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') started_task_name = "" if job_name: started_task_name = job_name @@ -559,7 +483,7 @@ def prepare_display_command(module, display_parms): return started_task_name, cmd -def prepare_stop_command(module, stop_parms): +def prepare_stop_command(module): """Validates parameters and creates stop command Parameters @@ -574,9 +498,9 @@ def prepare_stop_command(module, stop_parms): cmd The stop command in string format. """ - identifier = stop_parms.get('identifier_name') - job_name = stop_parms.get('job_name') - asid = stop_parms.get('asid') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + asid = module.params.get('asid') started_task_name = "" if job_name: started_task_name = job_name @@ -594,7 +518,7 @@ def prepare_stop_command(module, stop_parms): return started_task_name, cmd -def prepare_modify_command(module, modify_parms): +def prepare_modify_command(module): """Validates parameters and creates modify command Parameters @@ -609,9 +533,9 @@ def prepare_modify_command(module, modify_parms): cmd The modify command in string format. """ - identifier = modify_parms.get('identifier_name') - job_name = modify_parms.get('job_name') - parameters = modify_parms.get('parameters') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + parameters = module.params.get('parameters') started_task_name = "" if job_name: started_task_name = job_name @@ -633,7 +557,7 @@ def prepare_modify_command(module, modify_parms): return started_task_name, cmd -def prepare_cancel_command(module, cancel_parms): +def prepare_cancel_command(module): """Validates parameters and creates cancel command Parameters @@ -648,12 +572,12 @@ def prepare_cancel_command(module, cancel_parms): cmd The cancel command in string format. 
""" - identifier = cancel_parms.get('identifier_name') - job_name = cancel_parms.get('job_name') - asid = cancel_parms.get('asid') - dump = cancel_parms.get('dump') - armrestart = cancel_parms.get('armrestart') - userid = cancel_parms.get('userid') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + asid = module.params.get('asid') + dump = module.params.get('dump') + armrestart = module.params.get('armrestart') + userid = module.params.get('userid') started_task_name = "" if job_name: started_task_name = job_name @@ -683,12 +607,12 @@ def prepare_cancel_command(module, cancel_parms): return started_task_name, cmd -def prepare_force_command(module, force_parms): +def prepare_force_command(module): """Validates parameters and creates force command Parameters ---------- - force_parms : dict + module : dict The started task force command parameters. Returns @@ -698,14 +622,14 @@ def prepare_force_command(module, force_parms): cmd The force command in string format. """ - identifier = force_parms.get('identifier_name') - job_name = force_parms.get('job_name') - asid = force_parms.get('asid') - arm = force_parms.get('arm') - armrestart = force_parms.get('armrestart') - userid = force_parms.get('userid') - tcb_address = force_parms.get('tcb_address') - retry = force_parms.get('retry') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + asid = module.params.get('asid') + arm = module.params.get('arm') + armrestart = module.params.get('armrestart') + userid = module.params.get('userid') + tcb_address = module.params.get('tcb_address') + retry = module.params.get('retry') started_task_name = "" if tcb_address and len(tcb_address) != 6: module.fail_json( @@ -764,39 +688,81 @@ def extract_keys(stdout): tasks The list of task parameters. 
""" - keys = {'A': 'ASID', 'CT': 'CPU_Time', 'ET': 'Elapsed_Time', 'WUID': 'WUID', 'USERID': 'USERID', 'P': 'Priority'} + keys = { + 'A': 'asid', + 'CT': 'cpu_time', + 'ET': 'elapsed_time', + 'WUID': 'work_unit_identifier', + 'USERID': 'userid', + 'P': 'priority', + 'PER': 'program_event_recording', + 'SMC': 'system_management_control', + 'PGN': 'program_name', + 'SCL': 'started_class_list', + 'WKL': 'workload_manager', + 'ASTE': 'address_space_table_entry', + 'RGP': 'resource_group', + 'DSPNAME': 'dataspace_name' + } lines = stdout.strip().split('\n') tasks = [] - current_task = None + current_task = {} task_header_regex = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)') kv_pattern = re.compile(r'(\S+)=(\S+)') for line in lines[5:]: line = line.strip() - if len(line.split()) >= 5 and task_header_regex.search(line): + match_firstline = task_header_regex.search(line) + if len(line.split()) >= 5 and match_firstline: if current_task: tasks.append(current_task) - match = task_header_regex.search(line) - current_task = { - "TASK_NAME": match.group(1), - "DETAILS": {} - } + current_task['task_name'] = match_firstline.group(1) for match in kv_pattern.finditer(line): key, value = match.groups() if key in keys: key = keys[key] - current_task["DETAILS"][key] = value + current_task[key] = value elif current_task: for match in kv_pattern.finditer(line): key, value = match.groups() if key in keys: key = keys[key] - current_task["DETAILS"][key] = value + current_task[key] = value if current_task: + el_time = current_task.get('elapsed_time') + if el_time: + current_task['started_time'] = calculate_start_time(el_time) tasks.append(current_task) return tasks -def fetch_logs(command): +def parse_time(ts_str): + # Case 1: Duration like "000.005seconds" + sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) + if sec_match: + return timedelta(seconds=float(sec_match.group(1))) + # Case 2: hh.mm.ss + hms_match = re.match(r"^(\d+).(\d{2}).(\d{2})$", ts_str) + if 
hms_match: + h, m, s = map(int, hms_match.groups()) + return timedelta(hours=h, minutes=m, seconds=s) + # Case 3: hhhhh.mm + hm_match = re.match(r"^(\d{1,5}).(\d{2})$", ts_str) + if hm_match: + h, m = map(int, hm_match.groups()) + return timedelta(hours=h, minutes=m) + + +def calculate_start_time(ts_str): + now = datetime.now() + parsed = parse_time(ts_str) + if parsed is None: + return "" + # If it's a timedelta (duration), subtract from now → absolute datetime + if isinstance(parsed, timedelta): + return f"{(now - parsed).strftime('%Y-%m-%d %H:%M:%S')}" + + +def fetch_logs(command, timeout): """Extracts keys and values from the given stdout Parameters @@ -809,7 +775,9 @@ def fetch_logs(command): list The list of logs from SYSLOG """ - stdout = zsystem.read_console(options='-t1') + time_mins = timeout // 60 + 1 + option = '-t' + str(time_mins) + stdout = zsystem.read_console(options=option) stdout_lines = stdout.splitlines() first = None pattern = rf"\b{command}\b" @@ -822,79 +790,6 @@ def fetch_logs(command): return logs -def parse_and_validate_args(params): - """Parse and validate input parameters - - Parameters - ---------- - params : dict - The dictionary which has input parameters. - - Returns - ------- - dict - The validated list of input parameters. 
- """ - start_args = dict( - device_type=dict(type="str", required=False), - device_number=dict(type="str", required=False), - identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), - job_account=dict(type="str", required=False), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - keyword_parameters=dict(type="basic_dict", required=False), - member_name=dict(type="member_name", required=False, aliases=["member"]), - parameters=dict(type="list", elements="str", required=False), - reus_asid=dict(type="str", required=False), - subsystem=dict(type="str", required=False), - volume_serial=dict(type="str", required=False) - ) - display_args = dict( - identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) - ) - modify_args = dict( - identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - parameters=dict(type="list", elements="str", required=False) - ) - cancel_args = dict( - armrestart=dict(type="bool", required=False), - asid=dict(type="str", required=False), - dump=dict(type="bool", required=False), - identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - userid=dict(type="str", required=False) - ) - force_args = dict( - arm=dict(type="bool", required=False), - armrestart=dict(type="bool", required=False), - asid=dict(type="str", required=False), - identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - retry=dict(type="str", required=False), - tcb_address=dict(type="str", required=False), - userid=dict(type="str", required=False) - ) - stop_args = dict( - 
asid=dict(type="str", required=False), - identifier_name=dict(type="identifier_name", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) - ) - module_args = dict( - start_task=dict(type="dict", required=False, options=start_args), - stop_task=dict(type="dict", required=False, options=stop_args), - display_task=dict(type="dict", required=False, options=display_args), - modify_task=dict(type="dict", required=False, options=modify_args), - cancel_task=dict(type="dict", required=False, options=cancel_args), - force_task=dict(type="dict", required=False, options=force_args), - verbose=dict(type="bool", required=False), - wait_time_s=dict(type="int", default=5) - ) - parser = better_arg_parser.BetterArgParser(module_args) - parsed_args = parser.parse_args(params) - return parsed_args - - def run_module(): """Initialize the module. @@ -903,81 +798,212 @@ def run_module(): fail_json z/OS started task operation failed. """ - start_args = dict( - device_type=dict(type="str", required=False), - device_number=dict(type="str", required=False), - identifier_name=dict(type="str", required=False, aliases=["identifier"]), - job_account=dict(type="str", required=False), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - keyword_parameters=dict(type="dict", required=False, no_log=False), - member_name=dict(type="str", required=False, aliases=["member"]), - parameters=dict(type="list", elements="str", required=False), - reus_asid=dict(type="str", required=False, choices=["YES", "NO"]), - subsystem=dict(type="str", required=False), - volume_serial=dict(type="str", required=False) - ) - display_args = dict( - identifier_name=dict(type="str", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) - ) - modify_args = dict( - identifier_name=dict(type="str", required=False, aliases=["identifier"]), - 
job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - parameters=dict(type="list", elements="str", required=False) - ) - cancel_args = dict( - armrestart=dict(type="bool", required=False), - asid=dict(type="str", required=False), - dump=dict(type="bool", required=False), - identifier_name=dict(type="str", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - userid=dict(type="str", required=False) - ) - force_args = dict( - arm=dict(type="bool", required=False), - armrestart=dict(type="bool", required=False), - asid=dict(type="str", required=False), - identifier_name=dict(type="str", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]), - retry=dict(type="str", required=False, choices=["YES", "NO"]), - tcb_address=dict(type="str", required=False), - userid=dict(type="str", required=False) - ) - stop_args = dict( - asid=dict(type="str", required=False), - identifier_name=dict(type="str", required=False, aliases=["identifier"]), - job_name=dict(type="str", required=False, aliases=["job", "task", "task_name"]) - ) - - module_args = dict( - start_task=dict(type="dict", required=False, options=start_args), - stop_task=dict(type="dict", required=False, options=stop_args), - display_task=dict(type="dict", required=False, options=display_args), - modify_task=dict(type="dict", required=False, options=modify_args), - cancel_task=dict(type="dict", required=False, options=cancel_args), - force_task=dict(type="dict", required=False, options=force_args), - verbose=dict(type="bool", required=False, default=False), - wait_time_s=dict(type="int", default=5) - ) - module = AnsibleModule( - argument_spec=module_args, + argument_spec={ + 'state': { + 'type': 'str', + 'required': True, + 'choices': ['started', 'stopped', 'modified', 'displayed', 'forced', 'cancelled'] + }, + 'arm': { + 'type': 'bool', + 'required': 
False + }, + 'armrestart': { + 'type': 'bool', + 'required': False + }, + 'asid': { + 'type': 'str', + 'required': False + }, + 'device_number': { + 'type': 'str', + 'required': False + }, + 'device_type': { + 'type': 'str', + 'required': False + }, + 'dump': { + 'type': 'bool', + 'required': False + }, + 'identifier_name': { + 'type': 'str', + 'required': False, + 'aliases': ['identifier'] + }, + 'job_account': { + 'type': 'str', + 'required': False + }, + 'job_name': { + 'type': 'str', + 'required': False, + 'aliases': ['job', 'task_name', 'task'] + }, + 'keyword_parameters': { + 'type': 'dict', + 'required': False, + 'no_log': False + }, + 'member_name': { + 'type': 'str', + 'required': False, + 'aliases': ['member'] + }, + 'parameters': { + 'type': 'list', + 'elements': 'str', + 'required': False + }, + 'retry': { + 'type': 'str', + 'required': False, + 'choices': ['YES', 'NO'] + }, + 'reus_asid': { + 'type': 'str', + 'required': False, + 'choices': ['YES', 'NO'] + }, + 'subsystem': { + 'type': 'str', + 'required': False + }, + 'tcb_address': { + 'type': 'str', + 'required': False + }, + 'userid': { + 'type': 'str', + 'required': False + }, + 'verbose': { + 'type': 'bool', + 'required': False, + 'default': False + }, + 'volume_serial': { + 'type': 'str', + 'required': False + }, + 'wait_time_s': { + 'type': 'int', + 'required': False, + 'default': 0 + } + }, mutually_exclusive=[ - ["start_task", "stop_task", "display_task", "modify_task", "cancel_task", "force_task"] + ['device_number', 'device_type'] ], supports_check_mode=True ) + args_def = { + 'state': { + 'arg_type': 'str', + 'required': True + }, + 'arm': { + 'arg_type': 'bool', + 'required': False + }, + 'armrestart': { + 'arg_type': 'bool', + 'required': False + }, + 'asid': { + 'arg_type': 'str', + 'required': False + }, + 'device_number': { + 'arg_type': 'str', + 'required': False + }, + 'device_type': { + 'arg_type': 'str', + 'required': False + }, + 'dump': { + 'arg_type': 'bool', + 'required': 
False + }, + 'identifier_name': { + 'arg_type': 'identifier_name', + 'required': False, + 'aliases': ['identifier'] + }, + 'job_account': { + 'arg_type': 'str', + 'required': False + }, + 'job_name': { + 'arg_type': 'str', + 'required': False, + 'aliases': ['job', 'task_name', 'task'] + }, + 'keyword_parameters': { + 'arg_type': 'basic_dict', + 'required': False + }, + 'member_name': { + 'arg_type': 'member_name', + 'required': False, + 'aliases': ['member'] + }, + 'parameters': { + 'arg_type': 'list', + 'elements': 'str', + 'required': False + }, + 'retry': { + 'arg_type': 'str', + 'required': False + }, + 'reus_asid': { + 'arg_type': 'str', + 'required': False + }, + 'subsystem': { + 'arg_type': 'str', + 'required': False + }, + 'tcb_address': { + 'arg_type': 'str', + 'required': False + }, + 'userid': { + 'arg_type': 'str', + 'required': False + }, + 'verbose': { + 'arg_type': 'bool', + 'required': False + }, + 'volume_serial': { + 'arg_type': 'str', + 'required': False + }, + 'wait_time_s': { + 'arg_type': 'int', + 'required': False + } + } + try: - parms = parse_and_validate_args(module.params) + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args except ValueError as err: module.fail_json( - rc=5, msg='Parameter verification failed.', stderr=str(err) ) - wait_time_s = parms.get('wait_time_s') - verbose = parms.get('verbose') + state = module.params.get('state') + wait_time_s = module.params.get('wait_time_s') + verbose = module.params.get('verbose') kwargs = {} """ Below error messages are used to detrmine if response has any error.When @@ -1011,29 +1037,29 @@ def run_module(): execute_display_before = False execute_display_after = False - if parms.get('start_task'): + if state == "started": err_msg = start_errmsg execute_display_after = True - started_task_name, cmd = validate_and_prepare_start_command(module, parms.get('start_task')) - elif parms.get('display_task'): + 
started_task_name, cmd = validate_and_prepare_start_command(module) + elif state == "displayed": err_msg = display_errmsg - started_task_name, cmd = prepare_display_command(module, parms.get('display_task')) - elif parms.get('stop_task'): + started_task_name, cmd = prepare_display_command(module) + elif state == "stopped": execute_display_before = True err_msg = stop_errmsg - started_task_name, cmd = prepare_stop_command(module, parms.get('stop_task')) - elif parms.get('cancel_task'): + started_task_name, cmd = prepare_stop_command(module) + elif state == "cancelled": execute_display_before = True err_msg = cancel_errmsg - started_task_name, cmd = prepare_cancel_command(module, parms.get('cancel_task')) - elif parms.get('force_task'): + started_task_name, cmd = prepare_cancel_command(module) + elif state == "forced": execute_display_before = True err_msg = force_errmsg - started_task_name, cmd = prepare_force_command(module, parms.get('force_task')) - elif parms.get('modify_task'): + started_task_name, cmd = prepare_force_command(module) + elif state == "modified": execute_display_after = True err_msg = modify_errmsg - started_task_name, cmd = prepare_modify_command(module, parms.get('modify_task')) + started_task_name, cmd = prepare_modify_command(module) changed = False stdout = "" stderr = "" @@ -1043,9 +1069,10 @@ def run_module(): if err != "" or any(msg in out for msg in err_msg): isFailed = True if not isFailed or verbose: - system_logs = fetch_logs(cmd.upper()) + system_logs = fetch_logs(cmd.upper(), wait_time_s) if any(msg in system_logs for msg in err_msg): isFailed = True + current_state = "" if isFailed: if rc == 0: rc = 1 @@ -1056,36 +1083,32 @@ def run_module(): stderr = out stdout = "" else: + current_state = state changed = True stdout = out stderr = err - if parms.get('display_task'): + if state == "displayed": task_params = extract_keys(out) result = dict() if module.check_mode: module.exit_json(**result) - state = "" - + result = dict( - 
changed=changed, - cmd=cmd, - tasks=task_params, - rc=rc, - stdout=stdout, - stderr=stderr, - stdout_lines=stdout.split('\n'), - stderr_lines=stderr.split('\n'), - ) + changed=changed, + state=current_state, + cmd=cmd, + tasks=task_params, + rc=rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + ) if verbose: result["verbose_output"] = system_logs - if parms.get('display_task') or parms.get('modify_task'): - if len(task_params) > 0 and not isFailed: - result["state"] = "Active" - else: - result["state"] = "NotActive" - + module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index d678ed6ee7..6e399f9752 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -43,9 +43,8 @@ def test_start_task_with_invalid_member(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMTASK" - } + state = "started", + member_name = "SAMTASK" ) for result in start_results.contacted.values(): print(result) @@ -53,9 +52,8 @@ def test_start_task_with_invalid_member(ansible_zos_module): assert result.get("stderr") is not None start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLETASK" - } + state = "started", + member_name = "SAMPLETASK" ) for result in start_results.contacted.values(): print(result) @@ -66,11 +64,10 @@ def test_start_task_with_invalid_member(ansible_zos_module): def test_start_task_with_jobname_identifier(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "job_name": "SAMTASK", - "identifier": "TESTER" - } + state = "started", + member_name = "SAMPLE", + job_name = "SAMTASK", + identifier = "TESTER" ) for result in start_results.contacted.values(): print(result) 
@@ -81,10 +78,9 @@ def test_start_task_with_jobname_identifier(ansible_zos_module): def test_start_task_with_invalid_identifier(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "identifier": "$HELLO" - } + state = "started", + member_name = "SAMPTASK", + identifier = "$HELLO" ) for result in start_results.contacted.values(): @@ -94,10 +90,9 @@ def test_start_task_with_invalid_identifier(ansible_zos_module): assert result.get("stderr") is not None start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "identifier": "HELLO" - } + state = "started", + member_name = "SAMPLE", + identifier = "HELLO" ) for result in start_results.contacted.values(): print(result) @@ -109,10 +104,9 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): hosts = ansible_zos_module job_account = "(T043JM,JM00,1,0,0,This is the invalid job account information to test negative scenario)" start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "job_account": job_account - } + state = "started", + member_name = "SAMPLE", + job_account = job_account ) for result in start_results.contacted.values(): @@ -124,10 +118,9 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): def test_start_task_with_invalid_devicenum(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "device_number": "0870" - } + state = "started", + member_name = "SAMPLE", + device_number = "0870" ) for result in start_results.contacted.values(): @@ -139,10 +132,9 @@ def test_start_task_with_invalid_devicenum(ansible_zos_module): def test_start_task_with_invalid_volumeserial(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "volume_serial": "12345A" - } + state = "started", + member_name = 
"SAMPLE", + volume_serial = "12345A" ) for result in start_results.contacted.values(): @@ -154,10 +146,9 @@ def test_start_task_with_invalid_volumeserial(ansible_zos_module): def test_start_task_with_invalid_parameters(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "parameters": ["KEY1"] - } + state = "started", + member_name = "SAMPLE", + parameters = ["KEY1"] ) for result in start_results.contacted.values(): @@ -167,27 +158,25 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): assert result.get("cmd") == "S SAMPLE,,,'KEY1'" start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "parameters": ["KEY1", "KEY2", "KEY3"], - "volume_serial": "12345" - } + state = "started", + member_name = "SAMPLE", + parameters = ["KEY1", "KEY2", "KEY3"], + volume_serial = "123456" ) for result in start_results.contacted.values(): print(result) assert result.get("changed") is False assert result.get("stderr") is not None - assert result.get("cmd") == "S SAMPLE,,12345,(KEY1,KEY2,KEY3)" + assert result.get("cmd") == "S SAMPLE,,123456,(KEY1,KEY2,KEY3)" def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "device_number": "/0870", - "device_type": "TEST" - } + state = "started", + member_name = "SAMPLE", + device_number = "/0870", + device_type = "TEST" ) for result in start_results.contacted.values(): print(result) @@ -199,10 +188,9 @@ def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): def test_start_task_with_invalid_subsystem_negative(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member": "VLF", - "subsystem": "MSTRS" - } + state = "started", + member_name = "VLF", + subsystem = "MSTRS" ) for result in 
start_results.contacted.values(): print(result) @@ -214,11 +202,10 @@ def test_start_task_with_invalid_subsystem_negative(ansible_zos_module): def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member": "VLF", - "keyword_parameters":{ - "key1key1key1key1key1key1key1key1": "value1value1value1value1value1value1" - } + state = "started", + member_name = "VLF", + keyword_parameters = { + "key1key1key1key1key1key1key1key1": "value1value1value1value1value1value1" } ) for result in start_results.contacted.values(): @@ -227,11 +214,10 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None start_results = hosts.all.zos_started_task( - start_task={ - "member": "VLF", - "keyword_parameters":{ - "key1key1key1key1key1key1key1key1key1key1key1key1": "value1" - } + state = "started", + member_name = "VLF", + keyword_parameters = { + "key1key1key1key1key1key1key1key1key1key1key1key1": "value1" } ) for result in start_results.contacted.values(): @@ -240,12 +226,11 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None start_results = hosts.all.zos_started_task( - start_task={ - "member": "VLF", - "keyword_parameters":{ - "KEY1": "VALUE1", - "KEY2": "VALUE2" - } + state = "started", + member_name = "VLF", + keyword_parameters = { + "KEY1": "VALUE1", + "KEY2": "VALUE2" } ) for result in start_results.contacted.values(): @@ -258,10 +243,9 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): hosts = ansible_zos_module start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "device_number": "/ABCD" - } + state = "started", + member_name = "SAMPLE", 
+ device_number = "/ABCD" ) for result in start_results.contacted.values(): print(result) @@ -272,9 +256,8 @@ def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): def test_display_task_negative(ansible_zos_module): hosts = ansible_zos_module display_results = hosts.all.zos_started_task( - display_task={ - "identifier": "SAMPLE" - } + state = "displayed", + identifier = "SAMPLE" ) for result in display_results.contacted.values(): print(result) @@ -282,40 +265,23 @@ def test_display_task_negative(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None -def test_start_and_display_task_negative(ansible_zos_module): - hosts = ansible_zos_module - display_results = hosts.all.zos_started_task( - start_task={ - "member": "SAMPLE" - }, - display_task={ - "job": "SAMPLE" - } - ) - for result in display_results.contacted.values(): - print(result) - assert result.get("changed") is False - assert result.get("failed") is True - assert result.get("msg") is not None def test_stop_task_negative(ansible_zos_module): hosts = ansible_zos_module stop_results = hosts.all.zos_started_task( - stop_task={ - "identifier": "SAMPLE" - } + state = "stopped", + job_name = "SAMPLE" ) for result in stop_results.contacted.values(): print(result) assert result.get("changed") is False assert result.get("failed") is True - assert result.get("msg") is not None + assert result.get("stderr") is not None stop_results = hosts.all.zos_started_task( - stop_task={ - "job_name": "TESTER", - "identifier": "SAMPLE" - } + state = "stopped", + job_name = "TESTER", + identifier = "SAMPLE" ) for result in stop_results.contacted.values(): print(result) @@ -326,9 +292,8 @@ def test_stop_task_negative(ansible_zos_module): def test_modify_task_negative(ansible_zos_module): hosts = ansible_zos_module modify_results = hosts.all.zos_started_task( - modify_task={ - "identifier": "SAMPLE" - } + state = "modified", + identifier = "SAMPLE" ) for result in 
modify_results.contacted.values(): print(result) @@ -337,9 +302,8 @@ def test_modify_task_negative(ansible_zos_module): assert result.get("msg") is not None modify_results = hosts.all.zos_started_task( - modify_task={ - "job_name": "TESTER" - } + state = "modified", + job_name = "TESTER" ) for result in modify_results.contacted.values(): print(result) @@ -348,11 +312,10 @@ def test_modify_task_negative(ansible_zos_module): assert result.get("msg") is not None modify_results = hosts.all.zos_started_task( - modify_task={ - "job_name": "TESTER", - "identifier": "SAMPLE", - "parameters": ["REPLACE", "VX=10"] - } + state = "modified", + job_name = "TESTER", + identifier = "SAMPLE", + parameters = ["REPLACE", "VX=10"] ) for result in modify_results.contacted.values(): print(result) @@ -363,9 +326,8 @@ def test_modify_task_negative(ansible_zos_module): def test_cancel_task_negative(ansible_zos_module): hosts = ansible_zos_module cancel_results = hosts.all.zos_started_task( - cancel_task={ - "identifier": "SAMPLE" - } + state = "cancelled", + identifier = "SAMPLE" ) for result in cancel_results.contacted.values(): print(result) @@ -374,10 +336,9 @@ def test_cancel_task_negative(ansible_zos_module): assert result.get("msg") is not None cancel_results = hosts.all.zos_started_task( - cancel_task={ - "job_name": "TESTER", - "identifier": "SAMPLE" - } + state = "cancelled", + job_name = "TESTER", + identifier = "SAMPLE" ) for result in cancel_results.contacted.values(): print(result) @@ -385,11 +346,10 @@ def test_cancel_task_negative(ansible_zos_module): assert result.get("stderr") is not None assert result.get("cmd") == "C TESTER.SAMPLE" cancel_results = hosts.all.zos_started_task( - cancel_task={ - "userid": "OMVSTEST", - "asid": "0012", - "dump": True - }, + state = "cancelled", + asid = "0012", + userid = "OMVSTEST", + dump = True, verbose=True ) for result in cancel_results.contacted.values(): @@ -398,10 +358,9 @@ def test_cancel_task_negative(ansible_zos_module): assert 
result.get("stderr") is not None assert result.get("cmd") == "C U=OMVSTEST,A=0012,DUMP" cancel_results = hosts.all.zos_started_task( - cancel_task={ - "userid": "OMVSADM", - "armrestart": True - } + state = "cancelled", + userid = "OMVSADM", + armrestart = True ) for result in cancel_results.contacted.values(): print(result) @@ -412,9 +371,8 @@ def test_cancel_task_negative(ansible_zos_module): def test_force_task_negative(ansible_zos_module): hosts = ansible_zos_module force_results = hosts.all.zos_started_task( - force_task={ - "identifier": "SAMPLE" - } + state = "forced", + identifier = "SAMPLE" ) for result in force_results.contacted.values(): print(result) @@ -423,10 +381,9 @@ def test_force_task_negative(ansible_zos_module): assert result.get("msg") is not None force_results = hosts.all.zos_started_task( - force_task={ - "job_name": "TESTER", - "identifier": "SAMPLE" - } + state = "forced", + job_name = "TESTER", + identifier = "SAMPLE" ) for result in force_results.contacted.values(): print(result) @@ -434,10 +391,9 @@ def test_force_task_negative(ansible_zos_module): assert result.get("stderr") is not None assert result.get("cmd") == "FORCE TESTER.SAMPLE" force_results = hosts.all.zos_started_task( - force_task={ - "userid": "OMVSADM", - "armrestart": True - } + state = "forced", + userid = "OMVSADM", + armrestart = True ) for result in force_results.contacted.values(): print(result) @@ -446,10 +402,9 @@ def test_force_task_negative(ansible_zos_module): assert result.get("msg") is not None force_results = hosts.all.zos_started_task( - force_task={ - "job_name": "TESTER", - "retry": "YES" - } + state = "forced", + job_name = "TESTER", + retry = "YES" ) for result in force_results.contacted.values(): print(result) @@ -457,11 +412,10 @@ def test_force_task_negative(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None force_results = hosts.all.zos_started_task( - force_task={ - "job_name": "TESTER", - "tcb_address": 
"0006789", - "retry": "YES" - } + state = "forced", + job_name = "TESTER", + tcb_address = "0006789", + retry = "YES" ) for result in force_results.contacted.values(): print(result) @@ -469,12 +423,11 @@ def test_force_task_negative(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None force_results = hosts.all.zos_started_task( - force_task={ - "job_name": "TESTER", - "identifier": "SAMPLE", - "tcb_address": "000678", - "retry": "YES" - } + state = "forced", + job_name = "TESTER", + identifier = "SAMPLE", + tcb_address = "000678", + retry = "YES" ) for result in force_results.contacted.values(): print(result) @@ -482,11 +435,10 @@ def test_force_task_negative(ansible_zos_module): assert result.get("stderr") is not None assert result.get("cmd") == "FORCE TESTER.SAMPLE,TCB=000678,RETRY=YES" force_results = hosts.all.zos_started_task( - force_task={ - "userid": "OMVSTEST", - "tcb_address": "000678", - "retry": "YES" - }, + state = "forced", + userid = "OMVSTEST", + tcb_address = "000678", + retry = "YES", verbose=True ) for result in force_results.contacted.values(): @@ -515,9 +467,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE" - }, + state = "started", + member_name = "SAMPLE", verbose=True ) @@ -530,9 +481,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("verbose_output") is not None force_results = hosts.all.zos_started_task( - force_task={ - "task_name": "SAMPLE" - } + state = "forced", + task_name = "SAMPLE" ) for result in force_results.contacted.values(): print(result) @@ -542,9 +492,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert "CANCELABLE - ISSUE CANCEL BEFORE FORCE" in result.get("stderr") stop_results = hosts.all.zos_started_task( - cancel_task={ - "task_name": "SAMPLE" - } + state = "cancelled", + task_name = "SAMPLE" ) for result in 
stop_results.contacted.values(): @@ -557,11 +506,10 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): # validate identifier start_results = hosts.all.zos_started_task( - start_task={ - "member_name": "SAMPLE", - "identifier": "TESTER", - "reus_asid": "YES" - } + state = "started", + member = "SAMPLE", + identifier = "TESTER", + reus_asid = "YES" ) for result in start_results.contacted.values(): print(result) @@ -573,9 +521,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("cmd") == "S SAMPLE.TESTER,REUSASID=YES" stop_results = hosts.all.zos_started_task( - cancel_task={ - "task_name": "SAMPLE" - } + state = "cancelled", + task_name = "SAMPLE" ) for result in stop_results.contacted.values(): print(result) @@ -585,10 +532,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("verbose_output") is None stop_results = hosts.all.zos_started_task( - cancel_task={ - "task_name": "SAMPLE", - "identifier": "TESTER" - } + state = "cancelled", + task_name = "SAMPLE", + identifier = "TESTER" ) for result in stop_results.contacted.values(): print(result) @@ -600,10 +546,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): job_account = "(T043JM,JM00,1,0,0,)" start_results = hosts.all.zos_started_task( - start_task={ - "member": "SAMPLE", - "job_account": job_account - } + state = "started", + member = "SAMPLE", + job_account = job_account ) for result in start_results.contacted.values(): @@ -615,9 +560,8 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("verbose_output") is None display_result = hosts.all.zos_started_task( - display_task={ - "task_name": "SAMPLE" - } + state = "displayed", + task = "SAMPLE" ) for result in display_result.contacted.values(): print(result) @@ -631,10 +575,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) stop_results = 
hosts.all.zos_started_task( - cancel_task={ - "task_name": "SAMPLE", - "asid": asid_val - }, + state = "cancelled", + task_name = "SAMPLE", + asid = asid_val, verbose=True ) @@ -675,10 +618,9 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): ) start_results = hosts.all.zos_started_task( - start_task={ - "member": "SAMPLE", - "job_name": "TESTTSK" - } + state = "started", + member = "SAMPLE", + job_name = "TESTTSK" ) for result in start_results.contacted.values(): @@ -688,9 +630,8 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( - cancel_task={ - "task_name": "TESTTSK" - } + state = "cancelled", + task = "TESTTSK" ) for result in stop_results.contacted.values(): @@ -709,99 +650,92 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): ) def test_stop_and_modify_with_vlf_task(ansible_zos_module): - hosts = ansible_zos_module - modify_results = hosts.all.zos_started_task( - modify_task={ - "task_name": "VLF", - "parameters": ["REPLACE" ,"NN=00"] - } - ) - for result in modify_results.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" - assert result.get("cmd") == "F VLF,REPLACE,NN=00" + hosts = ansible_zos_module + modify_results = hosts.all.zos_started_task( + state = "modified", + task = "VLF", + parameters = ["REPLACE" ,"NN=00"] + ) + for result in modify_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "F VLF,REPLACE,NN=00" - display_result = hosts.all.zos_started_task( - display_task={ - "task_name": "VLF" - } - ) - for result in display_result.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" - 
assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + display_result = hosts.all.zos_started_task( + state = "displayed", + task = "VLF" + ) + for result in display_result.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") is None - display_output = list(display_result.contacted.values())[0].get("stdout") - asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + display_output = list(display_result.contacted.values())[0].get("stdout") + asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) - stop_results = hosts.all.zos_started_task( - stop_task={ - "task_name": "VLF", - "asid": asid_val - } - ) - for result in stop_results.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" - assert result.get("cmd") == f"P VLF,A={asid_val}" + stop_results = hosts.all.zos_started_task( + state = "stopped", + task = "VLF", + asid = asid_val + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"P VLF,A={asid_val}" - start_results = hosts.all.zos_started_task( - start_task={ - "member": "VLF", - "identifier": "TESTER", - "subsystem": "MSTR" - } - ) - for result in start_results.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" + start_results = hosts.all.zos_started_task( + state = "started", + member = "VLF", + identifier = "TESTER", + subsystem = "MSTR" + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" - 
modify_results = hosts.all.zos_started_task( - modify_task={ - "task_name": "VLF", - "identifier": "TESTER", - "parameters": ["REPLACE" ,"NN=00"] - } - ) - for result in modify_results.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" - assert result.get("cmd") == "F VLF.TESTER,REPLACE,NN=00" - - stop_results = hosts.all.zos_started_task( - stop_task={ - "task_name": "VLF", - "identifier": "TESTER" - } - ) - for result in stop_results.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" + modify_results = hosts.all.zos_started_task( + state = "modified", + task = "VLF", + identifier = "TESTER", + parameters = ["REPLACE" ,"NN=00"] + ) + for result in modify_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "F VLF.TESTER,REPLACE,NN=00" + + stop_results = hosts.all.zos_started_task( + state = "stopped", + task = "VLF", + identifier = "TESTER" + ) + for result in stop_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" - start_results = hosts.all.zos_started_task( - start_task={ - "member": "VLF", - "subsystem": "MSTR" - } - ) - for result in start_results.contacted.values(): - print(result) - assert result.get("changed") is True - assert result.get("rc") == 0 - assert result.get("stderr") == "" + start_results = hosts.all.zos_started_task( + state = "started", + member = "VLF", + subsystem = "MSTR" + ) + for result in start_results.contacted.values(): + print(result) + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): @@ -824,10 +758,9 
@@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): ) start_results = hosts.all.zos_started_task( - start_task={ - "member": "SAMPLE2", - "job_name": "SPROC" - }, + state = "started", + member = "SAMPLE2", + job_name = "SPROC", verbose=True ) @@ -838,9 +771,8 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( - cancel_task={ - "task_name": "SPROC" - } + state = "cancelled", + task = "SPROC" ) for result in stop_results.contacted.values(): From 38cf15b5ddb3a6653c3299b36a339320bbf11c9f Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 10 Sep 2025 22:09:48 +0530 Subject: [PATCH 36/73] Update better_arg_parser.py --- plugins/module_utils/better_arg_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index a42301ede4..9f22c36c5f 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -280,7 +280,7 @@ def _basic_dict_type(self, contents, resolve_dependencies): if not isinstance(contents, dict): raise ValueError('Invalid argument "{0}" for type "dict".'.format(contents)) return contents - + def _str_type(self, contents, resolve_dependencies): """Resolver for str type arguments. 
From 20f92234f39ee81fda2049b96647c3aa4f14edb2 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 10 Sep 2025 22:14:49 +0530 Subject: [PATCH 37/73] Delete 2202-zos_data_set-Support-noscratch-options.yml --- .../2202-zos_data_set-Support-noscratch-options.yml | 10 ---------- 1 file changed, 10 deletions(-) delete mode 100644 changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml diff --git a/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml b/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml deleted file mode 100644 index d1bf0fdad0..0000000000 --- a/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml +++ /dev/null @@ -1,10 +0,0 @@ -minor_changes: - - zos_data_set - Adds `noscratch` option to allow uncataloging - a data set without deleting it from the volume's VTOC. - (https://github.com/ansible-collections/ibm_zos_core/pull/2202) -trivial: - - data_set - Internal updates to support the noscratch option. - https://github.com/ansible-collections/ibm_zos_core/pull/2202) - - test_zos_data_set_func - added test case to verify the `noscratch` option - functionality in zos_data_set module. - (https://github.com/ansible-collections/ibm_zos_core/pull/2202). 
From 45fd79df34938f6aad5467e7d7006a79be8f3862 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 10 Sep 2025 23:47:47 +0530 Subject: [PATCH 38/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 8ef0f4a920..4b927231fa 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -702,7 +702,11 @@ def extract_keys(stdout): 'WKL': 'workload_manager', 'ASTE': 'address_space_table_entry', 'RGP': 'resource_group', - 'DSPNAME': 'dataspace_name' + 'DSPNAME': 'dataspace_name', + 'DMN': 'domain_number', + 'AFF': 'affinity', + 'SRVR': 'server', + 'QSC': 'queue_scan_count' } lines = stdout.strip().split('\n') tasks = [] @@ -720,13 +724,13 @@ def extract_keys(stdout): key, value = match.groups() if key in keys: key = keys[key] - current_task[key] = value + current_task[key.lower()] = value elif current_task: for match in kv_pattern.finditer(line): key, value = match.groups() if key in keys: key = keys[key] - current_task[key] = value + current_task[key.lower()] = value if current_task: el_time = current_task.get('elapsed_time') if el_time: @@ -1105,9 +1109,8 @@ def run_module(): stderr=stderr, stdout_lines=stdout.split('\n'), stderr_lines=stderr.split('\n'), + verbose_output=system_logs ) - if verbose: - result["verbose_output"] = system_logs module.exit_json(**result) From ac961fb9ce0ddb15e4f096afc753b63351932244 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 11 Sep 2025 00:02:22 +0530 Subject: [PATCH 39/73] Update testcases --- plugins/modules/zos_started_task.py | 3 +++ .../modules/test_zos_started_task_func.py | 20 +++++++++++-------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git 
a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 4b927231fa..cf7d94b923 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -1072,10 +1072,13 @@ def run_module(): system_logs = "" if err != "" or any(msg in out for msg in err_msg): isFailed = True + # Fetch system logs to validate any error occured in execution if not isFailed or verbose: system_logs = fetch_logs(cmd.upper(), wait_time_s) if any(msg in system_logs for msg in err_msg): isFailed = True + if not verbose: + system_logs = "" current_state = "" if isFailed: if rc == 0: diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 6e399f9752..01652708ba 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -345,6 +345,7 @@ def test_cancel_task_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "C TESTER.SAMPLE" + assert result.get("verbose_output") == "" cancel_results = hosts.all.zos_started_task( state = "cancelled", asid = "0012", @@ -357,6 +358,7 @@ def test_cancel_task_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "C U=OMVSTEST,A=0012,DUMP" + assert result.get("verbose_output") != "" cancel_results = hosts.all.zos_started_task( state = "cancelled", userid = "OMVSADM", @@ -446,6 +448,7 @@ def test_force_task_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "FORCE U=OMVSTEST,TCB=000678,RETRY=YES" + assert result.get("verbose_output") != "" def test_start_and_cancel_zos_started_task(ansible_zos_module): @@ -478,7 +481,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert 
result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is not None + assert result.get("verbose_output") != "" force_results = hosts.all.zos_started_task( state = "forced", @@ -502,7 +505,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert result.get("verbose_output") == "" # validate identifier start_results = hosts.all.zos_started_task( @@ -517,7 +520,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert result.get("verbose_output") == "" assert result.get("cmd") == "S SAMPLE.TESTER,REUSASID=YES" stop_results = hosts.all.zos_started_task( @@ -529,7 +532,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert result.get("verbose_output") == "" stop_results = hosts.all.zos_started_task( state = "cancelled", @@ -542,7 +545,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert result.get("verbose_output") == "" job_account = "(T043JM,JM00,1,0,0,)" start_results = hosts.all.zos_started_task( @@ -569,7 +572,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert result.get("verbose_output") == "" display_output = list(display_result.contacted.values())[0].get("stdout") asid_val = 
re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) @@ -587,7 +590,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is not None + assert result.get("verbose_output") != "" finally: hosts.all.file(path=temp_path, state="absent") @@ -673,7 +676,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert result.get("verbose_output") == "" display_output = list(display_result.contacted.values())[0].get("stdout") asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) @@ -769,6 +772,7 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert result.get("verbose_output") != "" stop_results = hosts.all.zos_started_task( state = "cancelled", From 416cc5b9c10b5138b9ef38c9638ec068b6699272 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 11 Sep 2025 00:06:43 +0530 Subject: [PATCH 40/73] Update test_zos_started_task_func.py --- tests/functional/modules/test_zos_started_task_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 01652708ba..3c03fd1b62 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -560,7 +560,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("rc") == 0 assert result.get("stderr") == "" assert len(result.get("tasks")) > 0 - assert result.get("verbose_output") is None + assert 
result.get("verbose_output") == "" display_result = hosts.all.zos_started_task( state = "displayed", From b5a225c9fb38a00af734a6a85c9cb1c3377d548c Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 11 Sep 2025 17:10:43 +0530 Subject: [PATCH 41/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 42 ++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index cf7d94b923..f4d92f6e73 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -214,11 +214,11 @@ type: list elements: dict contains: - job_name: + address_space_table_entry: description: - The name of the batch job. + The name of the started task. type: str - sample: LINKJOB + sample: SAMPLE stdout: description: The STDOUT from the command, may be empty. returned: changed @@ -268,7 +268,7 @@ # zoau_exceptions = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=1, *args, **kwargs): +def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=1, **kwargs): """Execute operator command. 
Parameters @@ -293,7 +293,7 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals timeout_c = 100 * timeout_s if execute_display_before: task_params = execute_display_command(started_task_name, timeout_c) - response = opercmd.execute(operator_cmd, timeout_c, *args, **kwargs) + response = opercmd.execute(operator_cmd, timeout_c, **kwargs) if execute_display_after: task_params = execute_display_command(started_task_name, timeout_c) @@ -700,7 +700,8 @@ def extract_keys(stdout): 'PGN': 'program_name', 'SCL': 'started_class_list', 'WKL': 'workload_manager', - 'ASTE': 'address_space_table_entry', + 'ASTE': 'data_space_address_entry', + 'ADDR SPACE ASTE': 'address_space_second_table_entry', 'RGP': 'resource_group', 'DSPNAME': 'dataspace_name', 'DMN': 'domain_number', @@ -711,15 +712,28 @@ def extract_keys(stdout): lines = stdout.strip().split('\n') tasks = [] current_task = {} + aste_key = "ADDR SPACE ASTE" task_header_regex = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)') - kv_pattern = re.compile(r'(\S+)=(\S+)') + kv_pattern = re.compile(rf'({re.escape(aste_key)}|\S+)=(\S+)') for line in lines[5:]: line = line.strip() match_firstline = task_header_regex.search(line) if len(line.split()) >= 5 and match_firstline: if current_task: + el_time = current_task.get('elapsed_time') + if el_time: + current_task['started_time'] = calculate_start_time(el_time) tasks.append(current_task) + current_task = {} current_task['task_name'] = match_firstline.group(1) + current_task['task_identifier'] = match_firstline.group(2) + if "=" not in match_firstline.group(5): + current_task['proc_step_name'] = match_firstline.group(3) + current_task['task_type'] = match_firstline.group(4) + current_task['task_status'] = match_firstline.group(5) + else: + current_task['task_type'] = match_firstline.group(3) + current_task['task_status'] = match_firstline.group(4) for match in kv_pattern.finditer(line): key, value = match.groups() if key in keys: @@ -730,7 
+744,10 @@ def extract_keys(stdout): key, value = match.groups() if key in keys: key = keys[key] - current_task[key.lower()] = value + if current_task.get(key.lower()): + current_task[key.lower()] = [current_task[key.lower()], value] + else: + current_task[key.lower()] = value if current_task: el_time = current_task.get('elapsed_time') if el_time: @@ -741,6 +758,8 @@ def extract_keys(stdout): def parse_time(ts_str): # Case 1: Duration like "000.005seconds" + print("hiiiii") + print(ts_str) sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) if sec_match: return timedelta(seconds=float(sec_match.group(1))) @@ -1028,15 +1047,14 @@ def run_module(): cancel_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'DUPLICATE NAME FOUND'] force_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'CANCELABLE', 'DUPLICATE NAME FOUND'] err_msg = [] - + kwargs = {} use_wait_arg = False if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): use_wait_arg = True - if use_wait_arg: + if use_wait_arg or wait_time_s: kwargs.update({"wait": True}) - args = [] cmd = "" execute_display_before = False @@ -1067,7 +1085,7 @@ def run_module(): changed = False stdout = "" stderr = "" - rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, timeout_s=wait_time_s, *args, **kwargs) + rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, timeout_s=wait_time_s, **kwargs) isFailed = False system_logs = "" if err != "" or any(msg in out for msg in err_msg): From 8f0dda36cb85652b6c0147440de7322ca1ccee4b Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 11 Sep 2025 23:57:58 +0530 Subject: [PATCH 42/73] adding time zone changes --- plugins/modules/zos_started_task.py | 42 ++++++++++++++++------------- tests/sanity/ignore-2.15.txt | 1 + tests/sanity/ignore-2.16.txt | 1 + 
tests/sanity/ignore-2.17.txt | 1 + tests/sanity/ignore-2.18.txt | 3 ++- tests/sanity/ignore-2.19.txt | 3 ++- 6 files changed, 30 insertions(+), 21 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index f4d92f6e73..f82dfc4a2d 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -21,7 +21,7 @@ module: zos_started_task version_added: 1.16.0 author: - - "Ravella Surendra Babu (@surendra.ravella582)" + - "Ravella Surendra Babu (@surendrababuravella)" short_description: Perform operations on started tasks. description: - start, display, modify, cancel, force and stop a started task @@ -161,12 +161,12 @@ required: false type: bool default: false - wait_time_s: + wait_time: required: false default: 0 type: int description: - - Option I(wait_time_s) is the the maximum amount of time, in seconds, to wait for a response after submitting + - Option I(wait_time) is the the maximum amount of time, in seconds, to wait for a response after submitting the console command. Default value of 0 means to wait the default amount of time supported by the opercmd utility. 
""" EXAMPLES = r""" @@ -250,9 +250,6 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( better_arg_parser ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - zoau_version_checker -) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError ) @@ -740,14 +737,26 @@ def extract_keys(stdout): key = keys[key] current_task[key.lower()] = value elif current_task: + data_space = {} for match in kv_pattern.finditer(line): + dsp_keys = ['dataspace_name', 'data_space_address_entry'] key, value = match.groups() if key in keys: key = keys[key] - if current_task.get(key.lower()): - current_task[key.lower()] = [current_task[key.lower()], value] + if key in dsp_keys: + data_space[key] = value + # key_val = current_task.get(key.lower()) + # if key_val: + # if isinstance(key_val, str): + # current_task[key.lower()] = [key_val, value] + # elif isinstance(key_val, list): + # current_task[key.lower()] = key_val + [value] else: current_task[key.lower()] = value + if current_task.get("dataspaces"): + current_task["dataspaces"] = current_task["dataspaces"] + [data_space] + elif data_space: + current_task["dataspaces"] = [data_space] if current_task: el_time = current_task.get('elapsed_time') if el_time: @@ -758,8 +767,6 @@ def extract_keys(stdout): def parse_time(ts_str): # Case 1: Duration like "000.005seconds" - print("hiiiii") - print(ts_str) sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) if sec_match: return timedelta(seconds=float(sec_match.group(1))) @@ -776,13 +783,13 @@ def parse_time(ts_str): def calculate_start_time(ts_str): - now = datetime.now() + now = datetime.now().astimezone() parsed = parse_time(ts_str) if parsed is None: return "" # If it's a timedelta (duration), subtract from now → absolute datetime if isinstance(parsed, timedelta): - return f"{(now - parsed).strftime('%Y-%m-%d %H:%M:%S')}" + return f"{now - parsed}" def fetch_logs(command, timeout): @@ 
-912,7 +919,7 @@ def run_module(): 'type': 'str', 'required': False }, - 'wait_time_s': { + 'wait_time': { 'type': 'int', 'required': False, 'default': 0 @@ -1009,7 +1016,7 @@ def run_module(): 'arg_type': 'str', 'required': False }, - 'wait_time_s': { + 'wait_time': { 'arg_type': 'int', 'required': False } @@ -1025,7 +1032,7 @@ def run_module(): stderr=str(err) ) state = module.params.get('state') - wait_time_s = module.params.get('wait_time_s') + wait_time_s = module.params.get('wait_time') verbose = module.params.get('verbose') kwargs = {} """ @@ -1048,11 +1055,8 @@ def run_module(): force_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'CANCELABLE', 'DUPLICATE NAME FOUND'] err_msg = [] kwargs = {} - use_wait_arg = False - if zoau_version_checker.is_zoau_version_higher_than("1.2.4"): - use_wait_arg = True - if use_wait_arg or wait_time_s: + if wait_time_s: kwargs.update({"wait": True}) cmd = "" diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index b8349e60d2..be5ebb4e7a 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -25,3 +25,4 @@ plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index ab5f45a7cb..29abd16b32 100644 --- a/tests/sanity/ignore-2.16.txt +++ b/tests/sanity/ignore-2.16.txt @@ -26,3 +26,4 @@ plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under 
Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git a/tests/sanity/ignore-2.17.txt b/tests/sanity/ignore-2.17.txt index d682068ed5..874f4f684c 100644 --- a/tests/sanity/ignore-2.17.txt +++ b/tests/sanity/ignore-2.17.txt @@ -26,3 +26,4 @@ plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.18.txt b/tests/sanity/ignore-2.18.txt index 52cbed2aa3..048fb8e797 100644 --- a/tests/sanity/ignore-2.18.txt +++ b/tests/sanity/ignore-2.18.txt @@ -25,4 +25,5 @@ plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Lic plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file +plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git 
a/tests/sanity/ignore-2.19.txt b/tests/sanity/ignore-2.19.txt index 52cbed2aa3..048fb8e797 100644 --- a/tests/sanity/ignore-2.19.txt +++ b/tests/sanity/ignore-2.19.txt @@ -25,4 +25,5 @@ plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Lic plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file +plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file From 408f11ae26a4579f6058c59e131a0652d20a8542 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Fri, 12 Sep 2025 15:55:33 -0600 Subject: [PATCH 43/73] [Enhancement][2.0]Updated zos_mount return values (#2272) * Updated zos_mount * Updated workflow * Updated mount to return state * Removed a print message * Updated changelogs * Update 2272-update-interface-zos_mount.yml --- .github/workflows/ac-module-doc.yml | 2 +- .../2272-update-interface-zos_mount.yml | 8 ++++++++ plugins/modules/zos_mount.py | 17 +++++++++++------ 3 files changed, 20 insertions(+), 7 deletions(-) create mode 100644 changelogs/fragments/2272-update-interface-zos_mount.yml diff --git a/.github/workflows/ac-module-doc.yml b/.github/workflows/ac-module-doc.yml index 3c552fd23a..7365184ef5 100644 --- a/.github/workflows/ac-module-doc.yml +++ b/.github/workflows/ac-module-doc.yml @@ -30,7 +30,7 @@ jobs: - name: Install dependencies run: | source venv/bin/activate - pip install ansible + pip install ansible==11.9.0 pip install ansible-doc-extractor - name: Run ac-module-doc diff --git 
a/changelogs/fragments/2272-update-interface-zos_mount.yml b/changelogs/fragments/2272-update-interface-zos_mount.yml new file mode 100644 index 0000000000..3babd6d11a --- /dev/null +++ b/changelogs/fragments/2272-update-interface-zos_mount.yml @@ -0,0 +1,8 @@ +breaking_changes: + - zos_mount - Return values ``persistent.addDataset`` and ``persistent.delDataset`` are deprecated in favor of ``persistent.name`` and + ``persistent.state``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2272). +minor_changes: + - zos_mount - New return value ``persistent.state`` that returns the entry state in the persistent data set. + New return value ``persistent.name`` that returns the persistent data set name. + (https://github.com/ansible-collections/ibm_zos_core/pull/2272). diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 9e14d88915..825c76a8fe 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -463,6 +463,13 @@ type: list sample: - [u'I did this because..'] + state: + description: + - The state of the persistent entry in the persistent data set. + - Possible values are C(added) and C(removed). + returned: always + type: str + sample: added unmount_opts: description: Describes how the unmount is to be performed. 
returned: changed and if state=unmounted @@ -768,11 +775,10 @@ def run_module(module, arg_def): backup_name = mt_backupOper(module, name, backup_code, tmphlq) res_args["backup_name"] = backup_name del persistent["backup"] - if "mounted" in state or "present" in state: - persistent["addDataset"] = name + if state == "mounted" or state == "present": + persistent["state"] = "added" else: - persistent["delDataset"] = name - del persistent["name"] + persistent["state"] = "removed" write_persistent = False if "mounted" in state or "present" in state or "absent" in state: @@ -795,7 +801,7 @@ def run_module(module, arg_def): path=path, fs_type=fs_type, state=state, - persistent=parsed_args.get("persistent"), + persistent=persistent, unmount_opts=unmount_opts, mount_opts=mount_opts, src_params=src_params, @@ -813,7 +819,6 @@ def run_module(module, arg_def): stderr="", ) ) - # data set to be mounted/unmounted must exist fs_du = data_set.DataSetUtils(src, tmphlq=tmphlq) fs_exists = fs_du.exists() From 712828b936b998c4493f56f2214ba90a15c5d59c Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 15 Sep 2025 23:25:11 +0530 Subject: [PATCH 44/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 395 +++++++++++++++++++++++----- 1 file changed, 333 insertions(+), 62 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index f82dfc4a2d..a93052f448 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -29,78 +29,97 @@ options: arm: description: - - I(arm) indicates to execute normal task termination routines without causing address space destruction. + - I(arm) indicates to execute normal task termination routines without causing address space destruction. required: false type: bool armrestart: description: - - I(armrestart) indicates to restart a started task automatically after the cancel completes. 
+ - Indicates that the batch job or started task should be automatically restarted after the cancel + completes, if it is registered as an element of the automatic restart manager. If the job or + task is not registered or if you do not specify this parameter, MVS will not automatically + restart the job or task. + Only applicable when state is cancelled or forced, otherwise is ignored. required: false type: bool asid: description: - - I(asid) is a unique address space identifier which gets assigned to each running started task. + - When state is cancelled or stopped or forced, asid is the hexadecimal address space + identifier of the work unit you want to cancel, stop or force. + - When state=displayed asid is the hexadecimal address space identifier of the work unit of + the task you get details from. required: false type: str device_type: description: - - I(device_type) is the type of the output device (if any) associated with the task. + - Option device_type is the type of the output device (if any) associated with the task. + Only applicable when state=started otherwise ignored. required: false type: str device_number: description: - - I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. - A slash (/) must precede a 4-digit number but is not before a 3-digit number. + - Option device_number is the number of the device to be started. A device number is 3 or 4 + hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit + number. + Only applicable when state=started otherwise ignored. required: false type: str dump: description: - - I(dump) indicates to take dump before ending a started task. + - A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) + depends on the JCL for the job. + Only applicable when state=cancelled otherwise ignored. 
required: false type: bool identifier_name: description: - - I(identifier_name) is the name that identifies the task to be started. This name can be up to 8 characters long. - The first character must be alphabetical. + - Option identifier_name is the name that identifies the task. This name can be up to 8 + characters long. The first character must be alphabetical. required: false type: str aliases: - - identifier + - identifier job_account: description: - - I(job_account) specifies accounting data in the JCL JOB statement for the started task. - If the source JCL was a job and has already accounting data, the value that is specified on this parameter - overrides the accounting data in the source JCL. + - Option job_account specifies accounting data in the JCL JOB statement for the started + task. If the source JCL was a job and has already accounting data, the value that is + specified on this parameter overrides the accounting data in the source JCL. + Only applicable when state=started otherwise ignored. required: false type: str job_name: description: - - I(job_name) is a name which should be assigned to a started task while starting it. If job_name is not specified, - then member_name is used as job_name. + - When state=started job_name is a name which should be assigned to a started task + while starting it. If job_name is not specified, then member_name is used as job_name. + Otherwise, job_name is the started task job name used to find and apply the state + selected. required: false type: str aliases: - - job - - task - - task_name + - job + - task + - task_name keyword_parameters: description: - - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. - The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than - 44 characters in length. 
+ - Any appropriate keyword parameter that you specify to override the corresponding + parameter in the cataloged procedure. The maximum length of each keyword=option is 66 + characters. No individual value within this field can be longer than 44 characters in length. + Only applicable when state=started otherwise ignored. required: false type: dict member_name: description: - - I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL - for the task to be started. The member can be either a job or a cataloged procedure. + - Option member_name is a 1 - 8 character name of a member of a partitioned data set that + contains the source JCL for the task to be started. The member can be either a job or a + cataloged procedure. + Only applicable when state=started otherwise ignored. required: false type: str aliases: - - member + - member parameters: description: - - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + - Program parameters passed to the started program, which might be a list in parentheses or + a string in single quotation marks required: false type: list elements: str @@ -110,21 +129,28 @@ required: false type: str choices: - - 'YES' - - 'NO' + - 'YES' + - 'NO' reus_asid: description: - - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, - a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified - on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, + a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified + on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. 
required: false type: str choices: - - 'YES' - - 'NO' + - 'YES' + - 'NO' state: description: - - The final state desired for specified started task. + - The desired state the started task should be after the module is executed. + If state=started and the started task is not found on the managed node, no action is taken, + module completes successfully with changed=False. + If state is cancelled , stopped or forced and the started task is not running on the + managed node, no action is taken, module completes successfully with changed=False. + If state is modified and the started task is not running, not found or modification was not + done, the module will fail. + If state is displayed the module will return the started task details. required: True type: str choices: @@ -136,8 +162,9 @@ - forced subsystem: description: - - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, - which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + - The name of the subsystem that selects the task for processing. The name must be 1 - 4 + characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must + be active. required: false type: str tcb_address: @@ -147,33 +174,112 @@ type: str volume_serial: description: - - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. + - If devicetype is a tape or direct-access device, the volume serial number of the volume is + mounted on the device. + Only applicable when state=started otherwise ignored. required: false type: str userid: description: - - I(userid) is the user ID of the time-sharing user you want to cancel. + - The user ID of the time-sharing user you want to cancel or force. + Only applicable when state=cancelled or state=forced , otherwise ignored. required: false type: str verbose: description: - - Return System logs that describe the task's execution. 
+ - When verbose=true return system logs that describe the task’s execution. + Using this option will can return a big response depending on system’s load, also it could + surface other programs activity. required: false type: bool default: false wait_time: + description: + - Option wait_time is the total time that module zos_started_tak will wait for a submitted + task. The time begins when the module is executed on the managed node. Default value of 0 + means to wait the default amount of time supported by the opercmd utility. required: false default: 0 type: int - description: - - Option I(wait_time) is the the maximum amount of time, in seconds, to wait for a response after submitting - the console command. Default value of 0 means to wait the default amount of time supported by the opercmd utility. + +attributes: + action: + support: none + description: Indicates this has a corresponding action plugin so some parts of the options can be executed on the controller. + async: + support: full + description: Supports being used with the ``async`` keyword. + check_mode: + support: full + description: Can run in check_mode and return changed status prediction without modifying target. If not supported, the action will be skipped. """ EXAMPLES = r""" - name: Start a started task using member name. zos_started_task: + state: "started" + member: "PROCAPP" +- name: Start a started task using member name and identifier. + zos_started_task: + state: "started" + member: "PROCAPP" + identifier: "SAMPLE" +- name: Start a started task using member name and job. + zos_started_task: + state: "started" member: "PROCAPP" - operation: "start" + job_name: "SAMPLE" +- name: Start a started task using member name, job and enable verbose. + zos_started_task: + state: "started" + member: "PROCAPP" + job_name: "SAMPLE" + verbose: True +- name: Start a started task using member name, subsystem and enable reuse asid. 
+ zos_started_task: + state: "started" + member: "PROCAPP" + subsystem: "MSTR" + reus_asid: "YES" +- name: Display a started task using started task name. + zos_started_task: + state: "displayed" + task_name: "PROCAPP" +- name: Display started tasks using matching regex. + zos_started_task: + state: "displayed" + task_name: "s*" +- name: Display all started tasks. + zos_started_task: + state: "displayed" + task_name: "all" +- name: Cancel a started tasks using task name. + zos_started_task: + state: "cancelled" + task_name: "SAMPLE" +- name: Cancel a started tasks using task name and asid. + zos_started_task: + state: "cancelled" + task_name: "SAMPLE" + asid: 0014 +- name: Cancel a started tasks using task name and asid. + zos_started_task: + state: "modified" + task_name: "SAMPLE" + parameters: ["XX=12"] +- name: Stop a started task using task name. + zos_started_task: + state: "stopped" + task_name: "SAMPLE" +- name: Stop a started task using task name, identifier and asid. + zos_started_task: + state: "stopped" + task_name: "SAMPLE" + identifier: "SAMPLE" + asid: 00A5 +- name: Force a started task using task name. + zos_started_task: + state: "forced" + task_name: "SAMPLE" """ RETURN = r""" @@ -206,6 +312,26 @@ returned: changed type: str sample: S SAMPLE +stderr: + description: The STDERR from the command, may be empty. + returned: changed + type: str + sample: An error has ocurred. +stderr_lines: + description: List of strings containing individual lines from STDERR. + returned: changed + type: list + sample: ["An error has ocurred"] +stdout: + description: The STDOUT from the command, may be empty. + returned: changed + type: str + sample: ISF031I CONSOLE OMVS0000 ACTIVATED. +stdout_lines: + description: List of strings containing individual lines from STDOUT. + returned: changed + type: list + sample: ["Allocation to SYSEXEC completed."] tasks: description: The output information for a list of started tasks matching specified criteria. 
@@ -214,32 +340,177 @@ type: list elements: dict contains: - address_space_table_entry: + address_space_second_table_entry: + description: + The control block used to manage memory for a started task + type: str + sample: 03E78500 + affinity: + description: + The identifier of the processor, for up to any four processors, if the job requires the services of specific processors. + affinity=NONE means the job can run on any processor. + type: str + sample: NONE + asid: + description: + Address space identifier (ASID), in hexadecimal. + type: str + sample: 0054 + cpu_time: + description: + The processor time used by the address space, including the initiator. This time does not include SRB time. + nnnnnnnn has one of these formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours: + sss.tttS + When time is less than 1000 seconds + hh.mm.ss + When time is at least 1000 seconds, but less than 100 hours + hhhhh.mm + When time is at least 100 hours + ******** + When time exceeds 100000 hours + NOTAVAIL + When the TOD clock is not working + type: str + sample: 000.008S + dataspaces: + description: + The started task dataspaces details. + returned: success + type: list + elements: dict + contains: + data_space_address_entry: + description: + Central address of the data space ASTE. + type: str + sample: 058F2180 + dataspace_name: + description: + Data space name associated with the address space. + type: str + sample: CIRRGMAP + domain_number: + description: + domain_number=N/A if the system is operating in goal mode. + type: str + sample: N/A + elapsed_time: + description: + -> For address spaces other than system address spaces, the elapsed time since job select time. + -> For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. + -> For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. 
+ sss.tttS + When time is less than 1000 seconds + hh.mm.ss + When time is at least 1000 seconds, but less than 100 hours + hhhhh.mm + When time is at least 100 hours + ******** + When time exceeds 100000 hours + NOTAVAIL + When the TOD clock is not working + type: str + sample: 812.983S + priority: + description: + The priority of a started task is determined by the Workload Manager (WLM), based on the service class and importance assigned to it. + type: str + sample: 1 + proc_step_name: + description: + One of the following: + -> For APPC-initiated transactions, the user ID requesting the transaction. + -> The name of a step within a cataloged procedure that was called by the step specified in field sss. + -> Blank, if there is no cataloged procedure. + -> The identifier of the requesting transaction program. + type: str + sample: VLF + program_event_recording: + description: + YES if A PER trap is active in the address space. + NO if No PER trap is active in the address space. + type: str + sample: NO + program_name: + description: + program_name=N/A if the system is operating in goal mode. + type: str + sample: N/A + queue_scan_count: + description: + YES if the address space has been quiesced. + NO if the address space is not quiesced. + type: str + sample: NO + resource_group: + description: + The name of the resource group currently associated the service class. It can also be N/A if there is no resource group association. + type: str + sample: N/A + server: + description: + YES if the address space is a server. + No if the address space is not a server. + type: str + sample: NO + started_class_list: + description: + The name of the service class currently associated with the address space. + type: str + sample: SYSSTC + started_time: + description: + The time when the started task started. + type: str + sample: 2025-09-11 18:21:50.293644+00:00 + system_management_control: + description: + Number of outstanding step-must-complete requests. 
+ type: str + sample: 000 + task_identifier: + description: + One of the following: + -> The name of a system address space. + -> The name of a step, for a job or attached APPC transaction program attached by an initiator. + -> The identifier of a task created by the START command. + -> The name of a step that called a cataloged procedure. + -> STARTING, if initiation of a started job, system task, or attached APPC transaction program is incomplete. + -> *MASTER*, for the master address space. + -> The name of an initiator address space. + type: str + sample: SPROC + task_name: description: The name of the started task. type: str sample: SAMPLE -stdout: - description: The STDOUT from the command, may be empty. - returned: changed - type: str - sample: ISF031I CONSOLE OMVS0000 ACTIVATED. -stderr: - description: The STDERR from the command, may be empty. - returned: changed - type: str - sample: An error has ocurred. -stdout_lines: - description: List of strings containing individual lines from STDOUT. - returned: changed - type: list - sample: ["Allocation to SYSEXEC completed."] -stderr_lines: - description: List of strings containing individual lines from STDERR. + task_status: + description: + The status of the task can be one of the following. + -> IN for swapped in. + -> OUT for swapped out, ready to run. + -> OWT for swapped out, waiting, not ready to run. + -> OU* for in process of being swapped out. + -> IN* for in process of being swapped in. + -> NSW for non-swappable. + type: str + sample: NSW + task_type: + description: + S for started task. + type: str + sample: S + workload_manager: + description: + The name of the workload currently associated with the address space. + type: str + sample: SYSTEM +verbose_output: + description: If C(verbose=true), the system log related to the started task executed state will be shown. 
returned: changed type: list - sample: ["An error has ocurred"] - + sample: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... """ from ansible.module_utils.basic import AnsibleModule From da3e8101694acc74774af2fdf0fdbbd2a00b94b2 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Tue, 16 Sep 2025 00:02:25 +0530 Subject: [PATCH 45/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index a93052f448..a56174e112 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -530,11 +530,6 @@ except ImportError: zoau_exceptions = ZOAUImportError(traceback.format_exc()) -# try: -# from zoautil_py import exceptions as zoau_exceptions -# except ImportError: -# zoau_exceptions = ZOAUImportError(traceback.format_exc()) - def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=1, **kwargs): """Execute operator command. From 0c19b4f7f8b3b9f7bd1d80755aece3a01ac74310 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 17 Sep 2025 16:30:04 +0530 Subject: [PATCH 46/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 145 +++++++++++++--------------- 1 file changed, 68 insertions(+), 77 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index a56174e112..8629a546e3 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -19,7 +19,7 @@ DOCUMENTATION = r""" module: zos_started_task -version_added: 1.16.0 +version_added: 2.0.0 author: - "Ravella Surendra Babu (@surendrababuravella)" short_description: Perform operations on started tasks. 
@@ -38,21 +38,21 @@ completes, if it is registered as an element of the automatic restart manager. If the job or task is not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. - Only applicable when state is cancelled or forced, otherwise is ignored. + - Only applicable when state is cancelled or forced, otherwise is ignored. required: false type: bool asid: description: - When state is cancelled or stopped or forced, asid is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. - - When state=displayed asid is the hexadecimal address space identifier of the work unit of + - When state=displayed, asid is the hexadecimal address space identifier of the work unit of the task you get details from. required: false type: str device_type: description: - Option device_type is the type of the output device (if any) associated with the task. - Only applicable when state=started otherwise ignored. + - Only applicable when state=started otherwise ignored. required: false type: str device_number: @@ -60,14 +60,14 @@ - Option device_number is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. - Only applicable when state=started otherwise ignored. + - Only applicable when state=started otherwise ignored. required: false type: str dump: description: - A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. - Only applicable when state=cancelled otherwise ignored. + - Only applicable when state=cancelled otherwise ignored. required: false type: bool identifier_name: @@ -83,7 +83,7 @@ - Option job_account specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. 
- Only applicable when state=started otherwise ignored. + - Only applicable when state=started otherwise ignored. required: false type: str job_name: @@ -103,7 +103,7 @@ - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. - Only applicable when state=started otherwise ignored. + - Only applicable when state=started otherwise ignored. required: false type: dict member_name: @@ -111,7 +111,7 @@ - Option member_name is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. - Only applicable when state=started otherwise ignored. + - Only applicable when state=started otherwise ignored. required: false type: str aliases: @@ -125,7 +125,8 @@ elements: str retry: description: - - I(retry) is applicable for only FORCE TCB. + - I(retry) is applicable for only FORCE TCB. + - Only applicable when state=forced otherwise ignored. required: false type: str choices: @@ -136,6 +137,7 @@ - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + - Only applicable when state=started otherwise ignored. required: false type: str choices: @@ -143,23 +145,23 @@ - 'NO' state: description: - - The desired state the started task should be after the module is executed. - If state=started and the started task is not found on the managed node, no action is taken, - module completes successfully with changed=False. 
- If state is cancelled , stopped or forced and the started task is not running on the - managed node, no action is taken, module completes successfully with changed=False. - If state is modified and the started task is not running, not found or modification was not - done, the module will fail. - If state is displayed the module will return the started task details. + - The desired state the started task should be after the module is executed. + - If state=started and the started task is not found on the managed node, no action is taken, + module completes successfully with changed=False. + - If state is cancelled , stopped or forced and the started task is not running on the + managed node, no action is taken, module completes successfully with changed=False. + - If state is modified and the started task is not running, not found or modification was not + done, the module will fail. + - If state is displayed the module will return the started task details. required: True type: str choices: - - started - - displayed - - modified - - cancelled - - stopped - - forced + - started + - displayed + - modified + - cancelled + - stopped + - forced subsystem: description: - The name of the subsystem that selects the task for processing. The name must be 1 - 4 @@ -169,26 +171,27 @@ type: str tcb_address: description: - - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. + - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. + - Only applicable when state=forced otherwise ignored. required: false type: str volume_serial: description: - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. - Only applicable when state=started otherwise ignored. + - Only applicable when state=started otherwise ignored. required: false type: str userid: description: - The user ID of the time-sharing user you want to cancel or force. 
- Only applicable when state=cancelled or state=forced , otherwise ignored.
+ - Only applicable when state=cancelled or state=forced, otherwise ignored.
 required: false
 type: str
 verbose:
 description:
- - When verbose=true return system logs that describe the task’s execution.
- Using this option will can return a big response depending on system’s load, also it could
+ - When verbose=true, returns system logs that describe the task execution.
+ Using this option can return a large response depending on system load, also it could
 surface other programs activity.
 required: false
 type: bool
@@ -359,17 +362,12 @@
 cpu_time:
 description:
 The processor time used by the address space, including the initiator. This time does not include SRB time.
- nnnnnnnn has one of these formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours:
- sss.tttS
- When time is less than 1000 seconds
- hh.mm.ss
- When time is at least 1000 seconds, but less than 100 hours
- hhhhh.mm
- When time is at least 100 hours
- ********
- When time exceeds 100000 hours
- NOTAVAIL
- When the TOD clock is not working
+ cpu_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours.
+ sss.tttS when time is less than 1000 seconds
+ hh.mm.ss when time is at least 1000 seconds, but less than 100 hours
+ hhhhh.mm when time is at least 100 hours
+ ******** when time exceeds 100000 hours
+ NOTAVAIL when the TOD clock is not working
 type: str
 sample: 000.008S
 dataspaces:
@@ -396,19 +394,15 @@
 sample: N/A
 elapsed_time:
 description:
- -> For address spaces other than system address spaces, the elapsed time since job select time.
- -> For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization.
- -> For system address spaces created after master scheduler initialization, the elapsed time since system address space creation.
- sss.tttS - When time is less than 1000 seconds - hh.mm.ss - When time is at least 1000 seconds, but less than 100 hours - hhhhh.mm - When time is at least 100 hours - ******** - When time exceeds 100000 hours - NOTAVAIL - When the TOD clock is not working + - For address spaces other than system address spaces, the elapsed time since job select time. + - For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. + - For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. + elapsed_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. + sss.tttS when time is less than 1000 seconds + hh.mm.ss when time is at least 1000 seconds, but less than 100 hours + hhhhh.mm when time is at least 100 hours + ******** when time exceeds 100000 hours + NOTAVAIL when the TOD clock is not working type: str sample: 812.983S priority: @@ -418,11 +412,10 @@ sample: 1 proc_step_name: description: - One of the following: - -> For APPC-initiated transactions, the user ID requesting the transaction. - -> The name of a step within a cataloged procedure that was called by the step specified in field sss. - -> Blank, if there is no cataloged procedure. - -> The identifier of the requesting transaction program. + - For APPC-initiated transactions, the user ID requesting the transaction. + - The name of a step within a cataloged procedure that was called by the step specified in field sss. + - Blank, if there is no cataloged procedure. + - The identifier of the requesting transaction program. type: str sample: VLF program_event_recording: @@ -462,7 +455,7 @@ description: The time when the started task started. 
type: str - sample: 2025-09-11 18:21:50.293644+00:00 + sample: "2025-09-11 18:21:50.293644+00:00" system_management_control: description: Number of outstanding step-must-complete requests. @@ -470,40 +463,38 @@ sample: 000 task_identifier: description: - One of the following: - -> The name of a system address space. - -> The name of a step, for a job or attached APPC transaction program attached by an initiator. - -> The identifier of a task created by the START command. - -> The name of a step that called a cataloged procedure. - -> STARTING, if initiation of a started job, system task, or attached APPC transaction program is incomplete. - -> *MASTER*, for the master address space. - -> The name of an initiator address space. + - The name of a system address space. + - The name of a step, for a job or attached APPC transaction program attached by an initiator. + - The identifier of a task created by the START command. + - The name of a step that called a cataloged procedure. + - STARTING if initiation of a started job, system task, or attached APPC transaction program is incomplete. + - MASTER* for the master address space. + - The name of an initiator address space. type: str sample: SPROC task_name: description: - The name of the started task. + - The name of the started task. type: str sample: SAMPLE task_status: description: - The status of the task can be one of the following. - -> IN for swapped in. - -> OUT for swapped out, ready to run. - -> OWT for swapped out, waiting, not ready to run. - -> OU* for in process of being swapped out. - -> IN* for in process of being swapped in. - -> NSW for non-swappable. + - IN for swapped in. + - OUT for swapped out, ready to run. + - OWT for swapped out, waiting, not ready to run. + - OU* for in process of being swapped out. + - IN* for in process of being swapped in. + - NSW for non-swappable. type: str sample: NSW task_type: description: - S for started task. + - S for started task. 
type: str sample: S workload_manager: description: - The name of the workload currently associated with the address space. + - The name of the workload currently associated with the address space. type: str sample: SYSTEM verbose_output: From 1b3967f9924b0bcfdaf6c9e4f04e03079a58896c Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Wed, 17 Sep 2025 09:54:27 -0600 Subject: [PATCH 47/73] Update galaxy.yml (#2288) --- galaxy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/galaxy.yml b/galaxy.yml index 3780418a2c..37d644ab37 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -6,7 +6,7 @@ namespace: ibm name: ibm_zos_core # The collection version -version: "1.15.0-beta.1" +version: "2.0.0-beta.1" # Collection README file readme: README.md From 203cde7f64225d239b86df2040f6f42cdbc3629e Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 18 Sep 2025 18:39:37 +0530 Subject: [PATCH 48/73] Updating testcases --- plugins/modules/zos_started_task.py | 17 ++- .../modules/test_zos_started_task_func.py | 123 ++++++++++-------- 2 files changed, 76 insertions(+), 64 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 8629a546e3..10fe006096 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -542,7 +542,7 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals Tuple containing the RC, standard out, standard err of the query script and started task parameters. 
""" - task_params = {} + task_params = [] # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s if execute_display_before: @@ -1289,6 +1289,7 @@ def run_module(): stderr=str(err) ) state = module.params.get('state') + userid = module.params.get('userid') wait_time_s = module.params.get('wait_time') verbose = module.params.get('verbose') kwargs = {} @@ -1305,10 +1306,10 @@ def run_module(): CANCELABLE: When force command used without using cancel command """ start_errmsg = ['ERROR', 'INVALID PARAMETER'] - stop_errmsg = ['NOT ACTIVE'] - display_errmsg = ['NOT ACTIVE'] - modify_errmsg = ['REJECTED', 'NOT ACTIVE'] - cancel_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'DUPLICATE NAME FOUND'] + stop_errmsg = ['NOT ACTIVE', 'INVALID PARAMETER'] + display_errmsg = ['NOT ACTIVE', 'INVALID PARAMETER'] + modify_errmsg = ['REJECTED', 'NOT ACTIVE', 'INVALID PARAMETER'] + cancel_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'DUPLICATE NAME FOUND', 'NON-CANCELABLE'] force_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'CANCELABLE', 'DUPLICATE NAME FOUND'] err_msg = [] kwargs = {} @@ -1332,11 +1333,13 @@ def run_module(): err_msg = stop_errmsg started_task_name, cmd = prepare_stop_command(module) elif state == "cancelled": - execute_display_before = True + if not userid: + execute_display_before = True err_msg = cancel_errmsg started_task_name, cmd = prepare_cancel_command(module) elif state == "forced": - execute_display_before = True + if not userid: + execute_display_before = True err_msg = force_errmsg started_task_name, cmd = prepare_force_command(module) elif state == "modified": diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 3c03fd1b62..1887438218 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -47,7 +47,6 @@ def 
test_start_task_with_invalid_member(ansible_zos_module): member_name = "SAMTASK" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None @@ -56,7 +55,6 @@ def test_start_task_with_invalid_member(ansible_zos_module): member_name = "SAMPLETASK" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("stderr") is not None @@ -70,7 +68,6 @@ def test_start_task_with_jobname_identifier(ansible_zos_module): identifier = "TESTER" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -84,7 +81,6 @@ def test_start_task_with_invalid_identifier(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("stderr") is not None @@ -95,7 +91,6 @@ def test_start_task_with_invalid_identifier(ansible_zos_module): identifier = "HELLO" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE.HELLO" @@ -110,7 +105,6 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -124,7 +118,6 @@ def test_start_task_with_invalid_devicenum(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -138,7 +131,6 @@ def test_start_task_with_invalid_volumeserial(ansible_zos_module): ) for result in 
start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE,,12345A" @@ -152,7 +144,6 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE,,,'KEY1'" @@ -165,7 +156,6 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE,,123456,(KEY1,KEY2,KEY3)" @@ -179,7 +169,6 @@ def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): device_type = "TEST" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -193,7 +182,6 @@ def test_start_task_with_invalid_subsystem_negative(ansible_zos_module): subsystem = "MSTRS" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -209,7 +197,6 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): } ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -221,7 +208,6 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): } ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -234,7 +220,6 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): } ) for result in 
start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == 'S VLF,KEY1=VALUE1,KEY2=VALUE2' @@ -248,7 +233,6 @@ def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): device_number = "/ABCD" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == 'S SAMPLE,/ABCD' @@ -260,7 +244,6 @@ def test_display_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in display_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -273,7 +256,6 @@ def test_stop_task_negative(ansible_zos_module): job_name = "SAMPLE" ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("stderr") is not None @@ -284,7 +266,6 @@ def test_stop_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "P TESTER.SAMPLE" @@ -296,7 +277,6 @@ def test_modify_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in modify_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -306,7 +286,6 @@ def test_modify_task_negative(ansible_zos_module): job_name = "TESTER" ) for result in modify_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -318,7 +297,6 @@ def test_modify_task_negative(ansible_zos_module): parameters = ["REPLACE", "VX=10"] ) for result in 
modify_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "F TESTER.SAMPLE,REPLACE,VX=10" @@ -330,7 +308,6 @@ def test_cancel_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in cancel_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -341,7 +318,6 @@ def test_cancel_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in cancel_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "C TESTER.SAMPLE" @@ -354,7 +330,6 @@ def test_cancel_task_negative(ansible_zos_module): verbose=True ) for result in cancel_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "C U=OMVSTEST,A=0012,DUMP" @@ -365,7 +340,6 @@ def test_cancel_task_negative(ansible_zos_module): armrestart = True ) for result in cancel_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -377,7 +351,6 @@ def test_force_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -388,7 +361,6 @@ def test_force_task_negative(ansible_zos_module): identifier = "SAMPLE" ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "FORCE TESTER.SAMPLE" @@ -398,7 +370,6 @@ def test_force_task_negative(ansible_zos_module): armrestart = True ) for result in force_results.contacted.values(): - 
print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -409,7 +380,6 @@ def test_force_task_negative(ansible_zos_module): retry = "YES" ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -420,7 +390,6 @@ def test_force_task_negative(ansible_zos_module): retry = "YES" ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None @@ -432,7 +401,6 @@ def test_force_task_negative(ansible_zos_module): retry = "YES" ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "FORCE TESTER.SAMPLE,TCB=000678,RETRY=YES" @@ -444,7 +412,6 @@ def test_force_task_negative(ansible_zos_module): verbose=True ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "FORCE U=OMVSTEST,TCB=000678,RETRY=YES" @@ -476,7 +443,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -488,7 +454,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): task_name = "SAMPLE" ) for result in force_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "FORCE SAMPLE" @@ -500,7 +465,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 
assert result.get("stderr") == "" @@ -515,7 +479,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): reus_asid = "YES" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -528,7 +491,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): task_name = "SAMPLE" ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is False assert result.get("stderr") is not None assert len(result.get("tasks")) > 0 @@ -540,7 +502,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): identifier = "TESTER" ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -555,7 +516,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -567,7 +527,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): task = "SAMPLE" ) for result in display_result.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -585,7 +544,6 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -597,9 +555,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): hosts.all.shell( cmd="drm {0}".format(data_set_name) ) - hosts.all.shell( - cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) - ) + # hosts.all.shell( + # cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + # ) def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): try: @@ -627,7 +585,6 
@@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -638,7 +595,6 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -660,7 +616,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): parameters = ["REPLACE" ,"NN=00"] ) for result in modify_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -671,7 +626,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): task = "VLF" ) for result in display_result.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -687,7 +641,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): asid = asid_val ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -700,7 +653,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): subsystem = "MSTR" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -712,7 +664,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): parameters = ["REPLACE" ,"NN=00"] ) for result in modify_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -724,7 +675,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): identifier = "TESTER" ) for result in 
stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -735,7 +685,6 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): subsystem = "MSTR" ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -768,7 +717,6 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): ) for result in start_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -780,7 +728,6 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): ) for result in stop_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -792,4 +739,66 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): ) hosts.all.shell( cmd="mrm '{0}(SAMPLE2)'".format(PROC_PDS) - ) \ No newline at end of file + ) + +def test_force_and_start_with_icsf_task(ansible_zos_module): + hosts = ansible_zos_module + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "ICSF" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "D A,ICSF" + assert len(result.get("tasks")) > 0 + + cancel_results = hosts.all.zos_started_task( + state = "cancelled", + task = "ICSF" + ) + for result in cancel_results.contacted.values(): + assert result.get("changed") is False + assert result.get("rc") == 1 + assert result.get("stderr") != "" + + asid = result.get("tasks")[0].get("asid") + force_results = hosts.all.zos_started_task( + state = "forced", + task = "ICSF", + identifier = "ICSF", + asid = asid, + arm = True + ) + 
for result in force_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"FORCE ICSF.ICSF,A={asid},ARM" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "ICSF" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "S ICSF" + assert len(result.get("tasks")) > 0 + +# This testcase will be successful when a TSO session with user 'OMVSADM' is open. +# def test_cancel_using_userid(ansible_zos_module): +# hosts = ansible_zos_module +# display_results = hosts.all.zos_started_task( +# state = "cancelled", +# userid = "OMVSADM" +# ) +# for result in display_results.contacted.values(): +# print(result) +# assert result.get("changed") is True +# assert result.get("rc") == 0 +# assert result.get("stderr") == "" +# assert result.get("cmd") == "C U=OMVSADM" +# assert len(result.get("tasks")) > 0 From dd58e0d1be07b87ce886c515b39d199b45a53393 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 18 Sep 2025 18:44:29 +0530 Subject: [PATCH 49/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 10fe006096..adfe4521d6 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -1303,6 +1303,7 @@ def run_module(): REJECTED: When modify command is not supported by respective started task. NOT LOGGED ON: When invalid userid passed in command. DUPLICATE NAME FOUND: When multiple started tasks exist with same name. + NON-CANCELABLE: When cancel command can't stop job and force command is needed. 
CANCELABLE: When force command used without using cancel command """ start_errmsg = ['ERROR', 'INVALID PARAMETER'] From a5f1325d669863807b53f2d2ea5ae205de1238be Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 18 Sep 2025 21:46:29 +0530 Subject: [PATCH 50/73] Update test_zos_started_task_func.py --- tests/functional/modules/test_zos_started_task_func.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 1887438218..6d922fa021 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -555,9 +555,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): hosts.all.shell( cmd="drm {0}".format(data_set_name) ) - # hosts.all.shell( - # cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) - # ) + hosts.all.shell( + cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + ) def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): try: From de2336ef7ff5bc0795df2bb88301eca3e77c8f74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Thu, 18 Sep 2025 16:26:57 -0500 Subject: [PATCH 51/73] [Enhancement][1537][zos_backup_restore]Add_special_words_bypass_disable_mng_strg_groups (#2276) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add new structure on sms restore test cases and add volumes * Remove prints * Add fragment * Fix unit testing for arguments * Add more validations * Fix documentation * Fix documentation * Fix documentation * Update volumes naming * Fix call to dzip * Fix assignation * Add extra validations * Add extra fix with stable sms groups * Add skip for cases not supported * Add skip for cases not supported --------- Co-authored-by: André 
Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores --- ...l_words_bypass_disable_mng_strg_groups.yml | 10 + plugins/modules/zos_backup_restore.py | 298 +++++++++++++----- tests/conftest.py | 24 +- .../modules/test_zos_backup_restore.py | 208 +++++++----- tests/helpers/volumes.py | 89 +++++- tests/unit/test_zos_backup_restore_unit.py | 14 +- 6 files changed, 472 insertions(+), 171 deletions(-) create mode 100644 changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml diff --git a/changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml b/changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml new file mode 100644 index 0000000000..b8ed2147b5 --- /dev/null +++ b/changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml @@ -0,0 +1,10 @@ +breaking_changes: + - zos_backup_restore - Add new dictionary option ``sms`` to specify how interact with the storage class. + Option ``sms_storage_class`` is deprecated in favor of ``storage_class`` and is under new option ``sms`` being sms.storage_class. + Option ``sms_management_class`` is deprecated in favor of ``management_class`` and is under new option ``sms`` being sms.management_class. + (https://github.com/ansible-collections/ibm_zos_core/pull/2276) +minor_changes: + - zos_backup_restore - Adds ``disable_automatic_class`` to specify that automatic class selection routines will not be used to determine classes for provide list. + Adds ``disable_automatic_storage_class`` to specify the automatic class selection routines will not be used to determine the source data set storage class. + Adds ``disable_automatic_management_class`` to specify the automatic class selection routines will not be used to determine the source data set management class. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2276) \ No newline at end of file diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 210278e7f8..0cb02acf23 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -151,26 +151,70 @@ - If I(operation=backup) and if I(dataset=False) then option I(terse) must be True. type: bool default: True - sms_storage_class: + sms: description: - - When I(operation=restore), specifies the storage class to use. The storage class will - also be used for temporary data sets created during restore process. - - When I(operation=backup), specifies the storage class to use for temporary data sets - created during backup process. - - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS - system's Automatic Class Selection (ACS) routines will be used. - type: str - required: False - sms_management_class: - description: - - When I(operation=restore), specifies the management class to use. The management class - will also be used for temporary data sets created during restore process. - - When I(operation=backup), specifies the management class to use for temporary data sets - created during backup process. - - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS - system's Automatic Class Selection (ACS) routines will be used. - type: str - required: False + - Specifies how System Managed Storage (SMS) interacts with the storage class + and management class when either backup or restore operations are occurring. + - Storage class contains performance and availability attributes related to the storage occupied by the data set. + A data set that has a storage class assigned to it is defined as an 'SMS-managed' data set. + - Management class contains the data set attributes related to the migration and backup of the data set and the + expiration date of the data set. 
A management class can be assigned only to a data set that also has a + storage class assigned. + type: dict + required: false + suboptions: + storage_class: + description: + - When I(operation=restore), specifies the storage class to use. The storage class will + also be used for temporary data sets created during restore process. + - When I(operation=backup), specifies the storage class to use for temporary data sets + created during backup process. + - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS + system's Automatic Class Selection (ACS) routines will be used. + type: str + required: False + management_class: + description: + - When I(operation=restore), specifies the management class to use. The management class + will also be used for temporary data sets created during restore process. + - When I(operation=backup), specifies the management class to use for temporary data sets + created during backup process. + - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS + system's Automatic Class Selection (ACS) routines will be used. + type: str + required: False + disable_automatic_class: + description: + - Specifies that the automatic class selection (ACS) routines will not be + used to determine the target data set class names for the provided list. + - The list must contain fully or partially qualified data set names. + - To include all selected data sets, "**" in a list. + - You must have READ access to RACF FACILITY class profile + `STGADMIN.ADR.RESTORE.BYPASSACS` to use this option. + type: list + elements: str + required: false + default: [] + disable_automatic_storage_class: + description: + - Specifies that automatic class selection (ACS) routines will not be used + to determine the source data set storage class. + - Enabling I(disable_automatic_storage_class) ensures ACS is null. 
+ - I(storage_class) and I(disable_automatic_storage_class) are mutually exclusive; you cannot use both. + - The combination of I(disable_automatic_storage_class) and C(disable_automatic_class=[dsn,dsn1,...]) + ensures the selected data sets will not be SMS-managed. + type: bool + required: false + default: false + disable_automatic_management_class: + description: + - Specifies that automatic class selection (ACS) routines will not be used + to determine the source data set management class. + - Enabling I(disable_automatic_storage_class) ensures ACS is null. + - I(management_class) and I(disable_automatic_management_class) are mutually exclusive; you cannot use both. + type: bool + required: false + default: false space: description: - If I(operation=backup), specifies the amount of space to allocate for the backup. @@ -385,19 +429,45 @@ operation: restore volume: MYVOL2 backup_name: /tmp/temp_backup.dzp - sms_storage_class: DB2SMS10 - sms_management_class: DB2SMS10 + sms: + storage_class: DB2SMS10 + management_class: DB2SMS10 + +- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. + Disable for all datasets SMS storage and management classes data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: /tmp/temp_backup.dzp + sms: + disable_automatic_class: + - "**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + +- name: Restore data sets from backup stored in the MVS file MY.BACKUP.DZP + Disable for al some datasets SMS storage and management classes data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: MY.BACKUP.DZP + sms: + disable_automatic_class: + - "ANSIBLE.TEST.**" + - "**.ONE.**" + disable_automatic_storage_class: true + disable_automatic_management_class: true - name: Backup all data sets matching the pattern USER.VSAM.** to z/OS UNIX - file /tmp/temp_backup.dzp and ensure the VSAM alternate index are preserved. 
+ file /tmp/temp_backup.dzp and ensure the VSAM alternate index are preserved. zos_backup_restore: operation: backup data_sets: include: user.vsam.** backup_name: /tmp/temp_backup.dzp index: true - """ + RETURN = r""" changed: description: @@ -470,8 +540,17 @@ def main(): overwrite=dict(type="bool", default=False), compress=dict(type="bool", default=False), terse=dict(type="bool", default=True), - sms_storage_class=dict(type="str", required=False), - sms_management_class=dict(type="str", required=False), + sms=dict( + type='dict', + required=False, + options=dict( + storage_class=dict(type="str", required=False), + management_class=dict(type="str", required=False), + disable_automatic_class=dict(type="list", elements="str", required=False, default=[]), + disable_automatic_storage_class=dict(type="bool", required=False, default=False), + disable_automatic_management_class=dict(type="bool", required=False, default=False), + ) + ), hlq=dict(type="str", required=False), tmp_hlq=dict(type="str", required=False), # 2.0 redesign extra values for ADRDSSU keywords @@ -493,18 +572,16 @@ def main(): overwrite = params.get("overwrite") compress = params.get("compress") terse = params.get("terse") - sms_storage_class = params.get("sms_storage_class") - sms_management_class = params.get("sms_management_class") + sms = params.get("sms") hlq = params.get("hlq") tmp_hlq = params.get("tmp_hlq") - - # 2.0 redesign extra ADRDSSU keywords sphere = params.get("index") - # extra keyword supported by ZOAU but not part of their signature. 
- keywords = {} - if sphere: - keywords.update(sphere=None) + if sms and bool(sms.get("storage_class")) and sms.get("disable_automatic_storage_class"): + module.fail_json(msg="storage_class and disable_automatic_storage_class are mutually exclusive, only one can be use by operation.") + + if sms and bool(sms.get("management_class")) and sms.get("disable_automatic_management_class"): + module.fail_json(msg="management_class and disable_automatic_management_class are mutually exclusive, only one can be use by operation.") if operation == "backup": backup( @@ -520,10 +597,9 @@ def main(): recover=recover, space=space, space_type=space_type, - sms_storage_class=sms_storage_class, - sms_management_class=sms_management_class, + sms=sms, tmp_hlq=tmp_hlq, - keywords=keywords, + sphere=sphere, ) else: restore( @@ -538,10 +614,9 @@ def main(): hlq=hlq, space=space, space_type=space_type, - sms_storage_class=sms_storage_class, - sms_management_class=sms_management_class, + sms=sms, tmp_hlq=tmp_hlq, - keywords=keywords, + sphere=sphere, ) result["backup_name"] = backup_name result["changed"] = True @@ -614,8 +689,17 @@ def parse_and_validate_args(params): overwrite=dict(type="bool", default=False), compress=dict(type="bool", default=False), terse=dict(type="bool", default=True), - sms_storage_class=dict(type=sms_type, required=False), - sms_management_class=dict(type=sms_type, required=False), + sms=dict( + type='dict', + required=False, + options=dict( + storage_class=dict(type=sms_type, required=False), + management_class=dict(type=sms_type, required=False), + disable_automatic_class=dict(type="list", elements="str", required=False, default=[]), + disable_automatic_storage_class=dict(type="bool", required=False), + disable_automatic_management_class=dict(type="bool", required=False), + ) + ), hlq=dict(type=hlq_type, default=None, dependencies=["operation"]), tmp_hlq=dict(type=hlq_type, required=False), # 2.0 redesign extra values for ADRDSSU keywords @@ -642,10 +726,9 @@ def 
backup( recover, space, space_type, - sms_storage_class, - sms_management_class, + sms, tmp_hlq, - keywords, + sphere, ): """Backup data sets or a volume to a new data set or unix file. @@ -675,13 +758,11 @@ def backup( Specifies the amount of space to allocate for the backup. space_type : str The unit of measurement to use when defining data set space. - sms_storage_class : str - Specifies the storage class to use. - sms_management_class : str - Specifies the management class to use. + sms : dict + Specifies how System Managed Storage (SMS) interacts with the storage class. tmp_hlq : str Specifies the tmp hlq to temporary datasets. - keywords : dict + sphere : dict Specifies ADRDSSU keywords that is passed directly to the dunzip utility. """ args = locals() @@ -701,10 +782,9 @@ def restore( hlq, space, space_type, - sms_storage_class, - sms_management_class, + sms, tmp_hlq, - keywords, + sphere, ): """Restore data sets or a volume from the backup. @@ -736,13 +816,11 @@ def restore( created during the restore process. space_type : str The unit of measurement to use when defining data set space. - sms_storage_class : str - Specifies the storage class to use. - sms_management_class : str - Specifies the management class to use. + sms : dict + Specifies how System Managed Storage (SMS) interacts with the storage class. tmp_hlq : str Specifies the tmp hlq to temporary datasets. - keywords : dict + sphere : dict Specifies ADRDSSU keywords that is passed directly to the dunzip utility. Raises @@ -771,6 +849,62 @@ def restore( ) +def set_adrdssu_keywords(sphere, sms=None): + """Set the values for special keywords, dunzip use key value for most special words. + + Parameters + ---------- + sms : dict + Dictionary of key value of management an storage class. + sphere : bool + Value if sphere will be use on dictionary for VSAM. + + Returns + ------- + keywords : dict + Dictionary with key value paris. 
+ """ + keywords = {} + + if sphere: + keywords.update(sphere=None) + + if sms: + if sms.get("disable_automatic_management_class"): + sms["management_class"] = "NULLMGMTCLAS" + + if sms.get("disable_automatic_storage_class"): + sms["storage_class"] = "NULLSTORCLAS" + + if len(sms.get("disable_automatic_class")) > 0: + bypassacs = set_bypassacs_str(sms.get("disable_automatic_class")) + keywords.update(bypass_acs=bypassacs) + + return keywords + + +def set_bypassacs_str(ds): + """_summary_ + + Parameters + ---------- + ds : list + List of datasets to be use. + + Returns + ------- + str : Datasets on str format. + """ + datasets = "" + if len(ds) > 0: + for dataset in ds: + if dataset == "**": + return "**" + datasets += f"{datasets} " + + return datasets + + def get_real_rc(output): """Parse out the final RC from MVS program output. @@ -1053,12 +1187,6 @@ def to_dzip_args(**kwargs): if kwargs.get("terse"): zoau_args["terse"] = kwargs.get("terse") - if kwargs.get("sms_storage_class"): - zoau_args["storage_class_name"] = kwargs.get("sms_storage_class") - - if kwargs.get("sms_management_class"): - zoau_args["management_class_name"] = kwargs.get("sms_management_class") - if kwargs.get("space"): size = str(kwargs.get("space")) if kwargs.get("space_type"): @@ -1068,8 +1196,18 @@ def to_dzip_args(**kwargs): if kwargs.get("tmp_hlq"): zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) - if kwargs.get("keywords"): - zoau_args["keywords"] = kwargs.get("keywords") + sms = kwargs.get("sms") + keywords = set_adrdssu_keywords(sphere=kwargs.get("sphere"), sms=sms) + + if sms: + if sms.get("storage_class"): + zoau_args["storage_class_name"] = sms.get("storage_class") + + if sms.get("management_class"): + zoau_args["management_class_name"] = sms.get("management_class") + + if keywords: + zoau_args["keywords"] = keywords return zoau_args @@ -1110,12 +1248,6 @@ def to_dunzip_args(**kwargs): zoau_args["overwrite"] = kwargs.get("overwrite") sms_specified = False - if 
kwargs.get("sms_storage_class"): - zoau_args["storage_class_name"] = kwargs.get("sms_storage_class") - - if kwargs.get("sms_management_class"): - zoau_args["management_class_name"] = kwargs.get("sms_management_class") - if sms_specified: zoau_args["sms_for_tmp"] = True @@ -1134,8 +1266,28 @@ def to_dunzip_args(**kwargs): zoau_args["high_level_qualifier"] = str(kwargs.get("tmp_hlq")) zoau_args["keep_original_hlq"] = False - if kwargs.get("keywords"): - zoau_args["keywords"] = kwargs.get("keywords") + sms = kwargs.get("sms") + keywords = set_adrdssu_keywords(sphere=kwargs.get("sphere")) + + if sms: + if sms.get("sms_storage_class"): + zoau_args["storage_class_name"] = sms.get("storage_class") + + if sms.get("sms_management_class"): + zoau_args["management_class_name"] = sms.get("management_class") + + if sms.get("disable_automatic_management_class"): + zoau_args["null_management_class"] = sms.get("disable_automatic_management_class") + + if sms.get("disable_automatic_storage_class"): + zoau_args["null_storage_class"] = sms.get("disable_automatic_storage_class") + + if len(sms.get("disable_automatic_class")) > 0: + bypassacs = set_bypassacs_str(ds=sms.get("disable_automatic_class")) + zoau_args["bypass_acs"] = bypassacs + + if keywords: + zoau_args["keywords"] = keywords return zoau_args diff --git a/tests/conftest.py b/tests/conftest.py index f8ba410d5d..39c8741e1d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,7 @@ __metaclass__ = type import pytest from ibm_zos_core.tests.helpers.ztest import ZTestHelper -from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds, get_volume_and_unit +from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds, get_volume_and_unit, get_volumes_sms_mgmt_class from ansible.plugins.action import ActionBase import sys from mock import MagicMock @@ -167,12 +167,30 @@ def volumes_unit_on_systems(ansible_zos_module, request): if path is None: src = 
request.config.getoption("--zinventory-raw") helper = ZTestHelper.from_args(src) - list_volumes = helper.get_volume_and_unit() + list_volumes = helper.get_volumes_list() else: - list_volumes = get_volume_and_unit(ansible_zos_module, path) + list_volumes = get_volume_and_unit(ansible_zos_module) yield list_volumes + +@pytest.fixture(scope="session") +def volumes_sms_systems(ansible_zos_module, request): + """ Call the pytest-ansible plugin to check volumes on the system and work properly a list by session.""" + path = request.config.getoption("--zinventory") + list_volumes = None + + if path is None: + src = request.config.getoption("--zinventory-raw") + helper = ZTestHelper.from_args(src) + list_volumes = helper.get_volumes_list() + else: + list_volumes = get_volumes(ansible_zos_module, path) + + volumes_with_sms = get_volumes_sms_mgmt_class(ansible_zos_module, list_volumes) + yield volumes_with_sms + + # * We no longer edit sys.modules directly to add zoautil_py mock # * because automatic teardown is not performed, leading to mock pollution # * across test files. 
diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index b5d8070fdb..660346813f 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -168,7 +168,6 @@ def assert_data_set_or_file_does_not_exist(hosts, name): def assert_data_set_exists(hosts, data_set_name): results = hosts.all.shell("dls '{0}'".format(data_set_name.upper())) for result in results.contacted.values(): - print(result) found = search( "^{0}$".format(data_set_name), result.get("stdout"), IGNORECASE | MULTILINE ) @@ -930,83 +929,139 @@ def test_backup_and_restore_a_data_set_with_same_hlq(ansible_zos_module): delete_remnants(hosts) -# def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module): -# hosts = ansible_zos_module -# try: -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, data_set_name) -# delete_data_set_or_file(hosts, data_set_name2) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) -# create_sequential_data_set_with_contents( -# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME -# ) -# create_sequential_data_set_with_contents( -# hosts, data_set_name2, DATA_SET_CONTENTS, VOLUME2 -# ) -# results = hosts.all.zos_backup_restore( -# operation="backup", -# data_sets=dict(include=DATA_SET_PATTERN), -# volume=VOLUME, -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# ) -# assert_module_did_not_fail(results) -# assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) -# results = hosts.all.zos_backup_restore( -# operation="restore", -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# volume=VOLUME, -# hlq=NEW_HLQ, -# ) -# assert_module_did_not_fail(results) -# assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) -# assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) -# finally: 
-# delete_data_set_or_file(hosts, data_set_name) -# delete_data_set_or_file(hosts, data_set_name2) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) +def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module, volumes_on_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_restore_location = get_tmp_ds_name() + hlqs = "TMPHLQ" + try: + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = volumes.get_available_vol() + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_restore_location) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume_1 + ) + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + volume=volume_1, + backup_name=data_set_restore_location, + overwrite=True, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_restore_location) + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_restore_location, + overwrite=True, + volume=volume_2, + hlq=hlqs, + ) + assert_module_did_not_fail(results) + assert_data_set_exists(hosts, data_set_restore_location) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_restore_location) + delete_remnants(hosts, hlqs) -# def test_backup_and_restore_of_full_volume(ansible_zos_module): -# hosts = ansible_zos_module -# try: -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, data_set_name) -# create_sequential_data_set_with_contents( -# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME -# ) -# results = hosts.all.zos_backup_restore( -# operation="backup", -# volume=VOLUME, -# full_volume=True, -# 
sms_storage_class="DB2SMS10", -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# space=500, -# space_type="m", -# ) -# assert_module_did_not_fail(results) -# assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, data_set_name) -# results = hosts.all.zos_backup_restore( -# operation="restore", -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# volume=VOLUME, -# full_volume=True, -# sms_storage_class="DB2SMS10", -# space=500, -# space_type="m", -# ) -# assert_module_did_not_fail(results) -# assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME) -# finally: -# delete_data_set_or_file(hosts, data_set_name) -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) +def test_backup_and_restore_of_sms_group(ansible_zos_module, volumes_sms_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + volumes = Volume_Handler(volumes_sms_systems) + volume, smsgrp = volumes.get_available_vol_with_sms() + delete_data_set_or_file(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume + ) + sms = {"storage_class":smsgrp} + results = hosts.all.zos_backup_restore( + data_sets=dict(include=data_set_name), + operation="backup", + volume=volume, + backup_name=data_set_backup_location, + overwrite=True, + sms=sms, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + sms = { + "disable_automatic_class":[data_set_name], + "disable_automatic_storage_class":True + } + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + overwrite=True, + volume=volume, + sms=sms, + ) + assert_module_did_not_fail(results) + assert_data_set_exists_on_volume(hosts, data_set_name, volume) + 
finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) + + +def test_backup_and_restore_all_of_sms_group(ansible_zos_module, volumes_sms_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + volumes = Volume_Handler(volumes_sms_systems) + volume, smsgrp = volumes.get_available_vol_with_sms() + delete_data_set_or_file(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume + ) + sms = {"storage_class":smsgrp} + + for attempt in range(2): + results = hosts.all.zos_backup_restore( + data_sets=dict(include=data_set_name), + operation="backup", + volume=volume, + backup_name=data_set_backup_location, + overwrite=True, + sms=sms, + ) + for result in results.contacted.values(): + if result.get("failed", False) is not True: + break + else: + if smsgrp == "PRIMARY": + sms = {"storage_class":"DB2SMS10"} + else: + sms = {"storage_class":"PRIMARY"} + sc = sms["storage_class"] + if sc not in {"DB2SMS10", "PRIMARY"}: + pytest.skip(f"Skipping test: unsupported storage_class {sc}") + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + sms = { + "disable_automatic_class":['**'], + "disable_automatic_storage_class":True + } + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + overwrite=True, + volume=volume, + sms=sms, + ) + assert_module_did_not_fail(results) + assert_data_set_exists_on_volume(hosts, data_set_name, volume) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) @pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) @@ -1249,7 +1304,6 @@ def 
managed_user_backup_of_data_set_tmphlq_restricted_user(ansible_zos_module): for result in results.contacted.values(): assert result.get("backup_name") == '', \ f"Backup name '{backup_name}' is there in output so tmphlq failed." - print(result) assert result.get("changed", False) is False finally: diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py index d254a6e53a..b6355fa8dc 100644 --- a/tests/helpers/volumes.py +++ b/tests/helpers/volumes.py @@ -72,6 +72,16 @@ def get_available_vol_addr(self): print("Not more volumes in disposal return volume USER02") return "USER02","01A2" + def get_available_vol_with_sms(self): + """ Check in the list of volumes one on use or not, also send a default + volume USER02 as is the one with less data sets included.""" + for volume in self.volumes: + if not (volume.in_use): + volume.use() + return volume.name, volume.unit + print("Not more volumes in disposal return volume 222222") + return "222222","DB2SMS10" + def free_vol(self, vol): """ Check from the array the volume is already free for other test to use.""" for volume in self.volumes: @@ -114,7 +124,9 @@ def get_volumes(ansible_zos_module, path): storage_online.append(vol_w_info[3]) # Insert a volumes for the class ls_Volumes to give flag of in_use and correct manage for vol in storage_online: - list_volumes.append(vol) + valid = validate_ds_creation_on_volume(hosts, vol, 'seq') + if valid: + list_volumes.append(vol) if prefer_vols is not None: list(map(str, prefer_vols)) prefer_vols.extend(list_volumes) @@ -175,7 +187,7 @@ def create_vvds_on_volume( ansible_zos_module, volume): return False -def get_volume_and_unit(ansible_zos_module, path): +def get_volume_and_unit(ansible_zos_module): """Get an array of available volumes, and it's unit""" # Using the command d u,dasd,online to fill an array of available volumes with the priority # of of actives (A) and storage (STRG) first then online (O) and storage and if is needed, the @@ -184,7 +196,6 @@ def 
get_volume_and_unit(ansible_zos_module, path): hosts = ansible_zos_module list_volumes = [] all_volumes_list = [] - priv_online = [] flag = False iteration = 5 volumes_datasets = [] @@ -208,18 +219,8 @@ def get_volume_and_unit(ansible_zos_module, path): if len(vol_w_info)>3: if vol_w_info[2] == 'O' and "USER" in vol_w_info[3] and vol_w_info[4] == "PRIV/RSDNT": - # The next creation of dataset is to validate if the volume will work properly for the test suite - dataset = get_tmp_ds_name() - valid_creation = hosts.all.zos_data_set(name=dataset, type='pds', volumes=f'{vol_w_info[3]}') - - for valid in valid_creation.contacted.values(): - if valid.get("changed") == "false": - valid = False - else: - valid = True - hosts.all.zos_data_set(name=dataset, state="absent") - + valid = validate_ds_creation_on_volume(hosts, vol_w_info[3], "pds") # When is a valid volume is required to get the datasets present on the volume if valid: ds_on_vol = hosts.all.shell(cmd=f"vtocls {vol_w_info[3]}") @@ -232,3 +233,63 @@ def get_volume_and_unit(ansible_zos_module, path): list_volumes = [[x[1], x[2]] for x in sorted_volumes] return list_volumes + +def get_volumes_sms_mgmt_class(ansible_zos_module, volumes_on_system): + """ + From the current volumes available to write and delete dataset search for any sms group that is associate with. + """ + volumes_smsclass = find_volume_with_sms_class(ansible_zos_module, volumes_on_system) + if len(volumes_smsclass) > 0: + return volumes_smsclass + + volumes_smsclass = [] + print("Warning: No sms storage volumes on system, using DB2SMS10") + for vol in volumes_on_system: + volumes_smsclass.append([vol,'DB2SMS10']) + return volumes_smsclass + + +def find_volume_with_sms_class(ansible_zos_module, volumes_on_system): + """ + Fetches all volumes in the system and returns a list of volumes for + which there are sms class. 
+ """ + hosts = ansible_zos_module + vols_sms = [] + content = "" + # D SMS,STORGRP(SG1),LISTVOL + # D SMS,STORGRP(ALL),LISTVOL + # D SMS,STORGRP(),LISTVOL + # D SMS,VOL(XXXXXX) + for vol in volumes_on_system: + response = hosts.all.zos_operator(cmd=f"D SMS,VOL({vol})") + for res in response.contacted.values(): + content = res.get('content') + for line in content: + if 'REJECTED' in line or 'EC' in line: + continue + else: + words = line.lstrip() + if words.startswith(vol): + sms_grp = words.strip().split()[-1] + if sms_grp != "PRIMARY": + vols_sms.append([vol, sms_grp]) + continue + return vols_sms + + +def validate_ds_creation_on_volume(ansible_zos_module, vol, type): + """ + Utility to validate the volumes we get from the system is available to create and delete datasets + """ + valid = True + hosts = ansible_zos_module + dataset = get_tmp_ds_name() + valid_creation = hosts.all.zos_data_set(name=dataset, type=type, volumes=vol) + for valid in valid_creation.contacted.values(): + if valid.get("changed") == "false": + valid = False + else: + valid = True + hosts.all.zos_data_set(name=dataset, state="absent") + return valid diff --git a/tests/unit/test_zos_backup_restore_unit.py b/tests/unit/test_zos_backup_restore_unit.py index c6e8ce9c85..37a3f9c41a 100644 --- a/tests/unit/test_zos_backup_restore_unit.py +++ b/tests/unit/test_zos_backup_restore_unit.py @@ -142,11 +142,14 @@ def test_valid_backup_name(zos_backup_restore_mocker, backup_name): ["MYIclASS", "M0341CSS", "storagec", "s", "$*@@%#1", "#"], ) def test_valid_sms_classes(zos_backup_restore_mocker, sms_class): + sms = { + "storage_class":sms_class, + "management_class":sms_class + } valid_args = dict( operation="backup", data_sets=dict(include="user.*"), - sms_storage_class=sms_class, - sms_management_class=sms_class, + sms=sms ) assert_args_valid(zos_backup_restore_mocker, valid_args) @@ -230,11 +233,14 @@ def test_invalid_backup_name(zos_backup_restore_mocker, backup_name): ["5555bad", "toolongclass", 
"bad!char", True, False, 100, 0, -1], ) def test_invalid_sms_classes(zos_backup_restore_mocker, sms_class): + sms = { + "storage_class":sms_class, + "management_class":sms_class + } valid_args = dict( operation="backup", data_sets=dict(include="user.*"), - sms_storage_class=sms_class, - sms_management_class=sms_class, + sms=sms ) assert_args_invalid(zos_backup_restore_mocker, valid_args) From 1a478740a915bc8e0b795db3baf7139295023a02 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Fri, 19 Sep 2025 19:21:35 +0530 Subject: [PATCH 52/73] Resolving PR review comments --- plugins/module_utils/better_arg_parser.py | 8 ++++--- plugins/modules/zos_started_task.py | 27 +++++++++-------------- 2 files changed, 15 insertions(+), 20 deletions(-) diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index cdb86b1cf1..62b9f247f2 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -256,7 +256,7 @@ def _dict_type(self, contents, resolved_dependencies): return contents def _basic_dict_type(self, contents, resolve_dependencies): - """Resolver for str type arguments. + """Resolver for basic dict type arguments. Parameters ---------- @@ -359,7 +359,8 @@ def _bool_type(self, contents, resolve_dependencies): return contents def _member_name_type(self, contents, resolve_dependencies): - """Resolver for data_set type arguments. + """Resolver for PDS/E member name type arguments. This is part of + zos_started_task member name validfation. Parameters ---------- @@ -391,7 +392,8 @@ def _member_name_type(self, contents, resolve_dependencies): return str(contents) def _identifier_name_type(self, contents, resolve_dependencies): - """Resolver for data_set type arguments. + """Resolver for identifier name type arguments. This is part of + zos_started_task identifier name validation. 
Parameters ---------- diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index adfe4521d6..6af4997be3 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -145,14 +145,13 @@ - 'NO' state: description: - - The desired state the started task should be after the module is executed. - - If state=started and the started task is not found on the managed node, no action is taken, - module completes successfully with changed=False. - - If state is cancelled , stopped or forced and the started task is not running on the - managed node, no action is taken, module completes successfully with changed=False. - - If state is modified and the started task is not running, not found or modification was not - done, the module will fail. - - If state is displayed the module will return the started task details. + - I(state) should be the desired state of the started task after the module is executed. + - If state is started and the respective member is not present on the managed node, then error will be thrown with rc=1, + changed=false and stderr which contains error details. + - If state is cancelled , modified, displayed, stopped or forced and the started task is not running on the managed node, + then error will be thrown with rc=1, changed=false and stderr contains error details. + - If state is displayed and the started task is running, then the module will return the started task details along with + changed=true. required: True type: str choices: @@ -198,9 +197,9 @@ default: false wait_time: description: - - Option wait_time is the total time that module zos_started_tak will wait for a submitted - task. The time begins when the module is executed on the managed node. Default value of 0 - means to wait the default amount of time supported by the opercmd utility. + - Option wait_time is the total time that module zos_started_task will wait for a submitted task in centiseconds. 
+ The time begins when the module is executed on the managed node. Default value of 0 means to wait the default + amount of time supported by the opercmd utility. required: false default: 0 type: int @@ -1002,12 +1001,6 @@ def extract_keys(stdout): key = keys[key] if key in dsp_keys: data_space[key] = value - # key_val = current_task.get(key.lower()) - # if key_val: - # if isinstance(key_val, str): - # current_task[key.lower()] = [key_val, value] - # elif isinstance(key_val, list): - # current_task[key.lower()] = key_val + [value] else: current_task[key.lower()] = value if current_task.get("dataspaces"): From e097e771efc4a4ca041c57b80130ff5d7e156b0c Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Fri, 19 Sep 2025 19:31:29 +0530 Subject: [PATCH 53/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 6af4997be3..cf07cbb106 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -148,7 +148,7 @@ - I(state) should be the desired state of the started task after the module is executed. - If state is started and the respective member is not present on the managed node, then error will be thrown with rc=1, changed=false and stderr which contains error details. - - If state is cancelled , modified, displayed, stopped or forced and the started task is not running on the managed node, + - If state is cancelled , modified, displayed, stopped or forced and the started task is not running on the managed node, then error will be thrown with rc=1, changed=false and stderr contains error details. - If state is displayed and the started task is running, then the module will return the started task details along with changed=true. 
From ca35e3a415234da46cce2cbc5896d19375f1f5ca Mon Sep 17 00:00:00 2001 From: Demetri Date: Fri, 19 Sep 2025 11:03:50 -0700 Subject: [PATCH 54/73] Update spi comment (#2321) --- tests/helpers/users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/helpers/users.py b/tests/helpers/users.py index 4b370faef9..a28681a6ab 100644 --- a/tests/helpers/users.py +++ b/tests/helpers/users.py @@ -734,7 +734,7 @@ def _create_managed_user(self, managed_user: ManagedUserType) -> Tuple[str, str] try: cmd=f"{add_user_cmd.getvalue()}" - # need to connect with ssh -i /tmp/UPGLSFLH/id_rsa UPGLSFLH@ec01136a.vmec.svl.ibm.com + # need to connect with ssh -i /tmp/UPGLSFLH/id_rsa UPGLSFLH@xyz.com add_user_attributes = self._connect(self._remote_host, self._model_user,cmd) # Because this is a tsocmd run through shell, any user with a $ will be expanded and thus truncated, you can't change From 8a16e324656c62062d6ac7136e4d2e4c6f901bc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 19 Sep 2025 15:39:51 -0500 Subject: [PATCH 55/73] Add branch protecion rules (#2322) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: André Marcel Gutiérrez Benítez --- branch_protection_rules.json | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 branch_protection_rules.json diff --git a/branch_protection_rules.json b/branch_protection_rules.json new file mode 100644 index 0000000000..ec67bed7dd --- /dev/null +++ b/branch_protection_rules.json @@ -0,0 +1,12 @@ +[{ + "type": "branch-protection", + "name": "code-review", + "params": { + "checks": [ + "tekton/code-branch-protection", + "tekton/code-unit-tests", + "tekton/code-vulnerability-scan", + "tekton/code-detect-secrets" + ] + } +}] \ No newline at end of file From 626fe36de1d0ac294a2bf673f89420e1f2eb70c7 Mon Sep 17 00:00:00 2001 From: Fernando Flores 
Date: Fri, 19 Sep 2025 14:42:38 -0600 Subject: [PATCH 56/73] Changed keywords is_binary and is_executable to not use verbs (#2290) * Changed keywords is_binary and is_executable to not use verbs * Updated tests * Added changelog --- .../fragments/2290-update-verb-options.yml | 5 + plugins/action/zos_copy.py | 14 +- plugins/action/zos_fetch.py | 14 +- plugins/action/zos_script.py | 2 +- plugins/action/zos_unarchive.py | 2 +- plugins/module_utils/copy.py | 12 +- plugins/modules/zos_copy.py | 210 +++++++++--------- plugins/modules/zos_fetch.py | 64 +++--- .../functional/modules/test_zos_copy_func.py | 120 +++++----- .../functional/modules/test_zos_fetch_func.py | 14 +- .../functional/modules/test_zos_mount_func.py | 12 +- .../modules/test_zos_unarchive_func.py | 2 +- 12 files changed, 238 insertions(+), 233 deletions(-) create mode 100644 changelogs/fragments/2290-update-verb-options.yml diff --git a/changelogs/fragments/2290-update-verb-options.yml b/changelogs/fragments/2290-update-verb-options.yml new file mode 100644 index 0000000000..c632d97fd7 --- /dev/null +++ b/changelogs/fragments/2290-update-verb-options.yml @@ -0,0 +1,5 @@ +breaking_changes: + - zos_fetch - Option ``is_binary`` is replaced in favor of ``binary``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2290). + - zos_copy - Option ``is_binary`` is replaced in favor of ``binary``. Option ``is_executable`` is renamed to ``executable``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2290). 
\ No newline at end of file diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index bd6335017f..7745403b9c 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -55,9 +55,9 @@ def run(self, tmp=None, task_vars=None): backup = _process_boolean(task_args.get('backup'), default=False) local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) - is_binary = _process_boolean(task_args.get('is_binary'), default=False) + binary = _process_boolean(task_args.get('binary'), default=False) force = _process_boolean(task_args.get('force'), default=False) - is_executable = _process_boolean(task_args.get('is_executable'), default=False) + executable = _process_boolean(task_args.get('executable'), default=False) asa_text = _process_boolean(task_args.get('asa_text'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=True) backup_name = task_args.get("backup_name", None) @@ -104,7 +104,7 @@ def run(self, tmp=None, task_vars=None): msg = "'src' or 'content' is required" return self._exit_action(result, msg, failed=True) - if encoding and is_binary: + if encoding and binary: msg = "The 'encoding' parameter is not valid for binary transfer" return self._exit_action(result, msg, failed=True) @@ -112,12 +112,12 @@ def run(self, tmp=None, task_vars=None): msg = "Backup file provided but 'backup' parameter is False" return self._exit_action(result, msg, failed=True) - if is_binary and asa_text: - msg = "Both 'is_binary' and 'asa_text' are True. Unable to copy binary data as an ASA text file." + if binary and asa_text: + msg = "Both 'binary' and 'asa_text' are True. Unable to copy binary data as an ASA text file." return self._exit_action(result, msg, failed=True) - if is_executable and asa_text: - msg = "Both 'is_executable' and 'asa_text' are True. Unable to copy an is_executable as an ASA text file." 
+ if executable and asa_text: + msg = "Both 'executable' and 'asa_text' are True. Unable to copy an executable as an ASA text file." return self._exit_action(result, msg, failed=True) use_template = _process_boolean(task_args.get("use_template"), default=False) diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index 2da09c0d1d..b8b51363bb 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -39,7 +39,7 @@ display = Display() -def _update_result(result, src, dest, ds_type="USS", is_binary=False): +def _update_result(result, src, dest, ds_type="USS", binary=False): """ Helper function to update output result with the provided values """ data_set_types = { "PS": "Sequential", @@ -60,7 +60,7 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False): "src": src, "dest": dest, "data_set_type": data_set_types[ds_type], - "is_binary": is_binary, + "binary": binary, } ) return updated_result @@ -122,7 +122,7 @@ def run(self, tmp=None, task_vars=None): encoding = self._task.args.get('encoding', None) flat = _process_boolean(self._task.args.get('flat'), default=False) fail_on_missing = _process_boolean(self._task.args.get('fail_on_missing'), default=True) - is_binary = _process_boolean(self._task.args.get('is_binary')) + binary = _process_boolean(self._task.args.get('binary')) ignore_sftp_stderr = _process_boolean( self._task.args.get("ignore_sftp_stderr"), default=True ) @@ -193,7 +193,7 @@ def run(self, tmp=None, task_vars=None): result = dict( src="", dest="", - is_binary=False, + binary=False, checksum="", changed=False, data_set_type="", @@ -208,7 +208,7 @@ def run(self, tmp=None, task_vars=None): # Populate it with the modules response result["src"] = fetch_res.get("src") result["dest"] = fetch_res.get("dest") - result["is_binary"] = fetch_res.get("is_binary", False) + result["binary"] = fetch_res.get("binary", False) result["checksum"] = fetch_res.get("checksum") result["changed"] = fetch_res.get("changed", 
False) result["data_set_type"] = fetch_res.get("data_set_type") @@ -334,7 +334,7 @@ def run(self, tmp=None, task_vars=None): result.update(fetch_content) return result - if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary: + if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not binary: new_checksum = _get_file_checksum(dest) result["changed"] = local_checksum != new_checksum result["checksum"] = new_checksum @@ -362,7 +362,7 @@ def run(self, tmp=None, task_vars=None): finally: self._remote_cleanup(remote_path, ds_type, encoding) - return _update_result(result, src, dest, ds_type, is_binary=is_binary) + return _update_result(result, src, dest, ds_type, binary=binary) def _transfer_remote_content( self, dest, remote_path, src_type, ignore_stderr=False diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index de4c5a9158..11beb2bd08 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -93,7 +93,7 @@ def run(self, tmp=None, task_vars=None): src=script_path, dest=tempfile_path, replace=True, - is_binary=False, + binary=False, encoding=module_args.get('encoding'), use_template=module_args.get('use_template', False), template_parameters=module_args.get('template_parameters', dict()) diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index ef22800107..bf4fe17a70 100644 --- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -101,7 +101,7 @@ def run(self, tmp=None, task_vars=None): dest=dest, dest_data_set=dest_data_set, replace=force, - is_binary=True, + binary=True, ) ) copy_task = self._task.copy() diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 1e8691786c..63f6bbdd62 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -78,7 +78,7 @@ def _validate_path(path): return parsed_args.get("path") -def copy_uss_mvs(src, dest, is_binary=False): +def copy_uss_mvs(src, dest, 
binary=False): """Wrapper function for datasets.copy that handles possible exceptions that may occur. @@ -91,7 +91,7 @@ def copy_uss_mvs(src, dest, is_binary=False): Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether to perform a binary copy. Returns @@ -109,7 +109,7 @@ def copy_uss_mvs(src, dest, is_binary=False): "options": "" } - if is_binary: + if binary: copy_args["options"] = "-B" try: @@ -125,7 +125,7 @@ def copy_uss_mvs(src, dest, is_binary=False): return 0, "", "" -def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): +def copy_gdg2uss(src, dest, binary=False, asa_text=False): """Copy a whole GDG to a USS path. Parameters @@ -137,7 +137,7 @@ def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether the file to be copied contains binary data. asa_text : bool Whether the file to be copied contains ASA control @@ -155,7 +155,7 @@ def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): "options": "" } - if is_binary or asa_text: + if binary or asa_text: copy_args["options"] = "-B" for gds in generations: diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index c73aea2957..b78ce106ce 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -44,8 +44,8 @@ - If neither C(src) or C(dest) have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - - This option is only valid for text files. If C(is_binary) is C(true) - or C(is_executable) is C(true) as well, the module will fail. + - This option is only valid for text files. If C(binary) is C(true) + or C(executable) is C(true) as well, the module will fail. type: bool default: false required: false @@ -114,9 +114,9 @@ process outlined here and in the C(volume) option. - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of C(src). 
If C(src) is a USS file, C(dest) will have a Fixed Block (FB) record format and the - remaining attributes will be computed. If I(is_binary=true), C(dest) will have a Fixed Block + remaining attributes will be computed. If I(binary=true), C(dest) will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining - attributes will be computed. If I(is_executable=true),C(dest) will have an Undefined (U) record + attributes will be computed. If I(executable=true),C(dest) will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. - If C(src) is a file and C(dest) a partitioned data set, C(dest) does not need to include @@ -154,7 +154,7 @@ - If C(encoding) is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - - Only valid if C(is_binary) is false. + - Only valid if C(binary) is false. type: dict required: false suboptions: @@ -225,21 +225,21 @@ required: false default: true version_added: "1.4.0" - is_binary: + binary: description: - If set to C(true), indicates that the file or data set to be copied is a binary file or data set. - - When I(is_binary=true), no encoding conversion is applied to the content, + - When I(binary=true), no encoding conversion is applied to the content, all content transferred retains the original state. - - Use I(is_binary=true) when copying a Database Request Module (DBRM) to + - Use I(binary=true) when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. type: bool default: false required: false - is_executable: + executable: description: - If set to C(true), indicates that the file or library to be copied is an executable. - - If I(is_executable=true), and C(dest) is a data set, it must be a PDS or PDSE (library). 
+ - If I(executable=true), and C(dest) is a data set, it must be a PDS or PDSE (library). - If C(dest) is a nonexistent data set, the library attributes assigned will be Undefined (U) record format with a record length of 0, block size of 32760 and the remaining attributes will be computed. @@ -253,7 +253,7 @@ - If set to C(true), indicates that any aliases found in the source (USS file, USS dir, PDS/E library or member) are to be preserved during the copy operation. - Aliases are implicitly preserved when libraries are copied over to USS destinations. - That is, when C(is_executable=True) and C(dest) is a USS file or directory, this option will be ignored. + That is, when C(executable=True) and C(dest) is a USS file or directory, this option will be ignored. - Copying of aliases for text-based data sets from USS sources or to USS destinations is not currently supported. - If the C(dest) is Unix, the alias is not visible in Unix, even though the information is there and will be visible if copied to a library. @@ -566,7 +566,7 @@ transfers, if not available, the module will fail. - Beginning in version 1.8.x, zos_copy will no longer attempt to correct a copy of a data type member into a PDSE that contains program objects. You can control this - behavior using module option C(is_executable) that will signify an executable is being + behavior using module option C(executable) that will signify an executable is being copied into a PDSE with other executables. Mixing data type members with program objects will result in a (FSUM8976,./zos_copy.html) error. 
- It is the playbook author or user's responsibility to ensure they have @@ -656,7 +656,7 @@ zos_copy: src: /path/to/binary/file dest: HLQ.SAMPLE.PDSE(MEMBER) - is_binary: true + binary: true - name: Copy a sequential data set to a PDS member zos_copy: @@ -753,7 +753,7 @@ src: HLQ.COBOLSRC.PDSE(TESTPGM) dest: HLQ.NEW.PDSE(MYCOBOL) remote_src: true - is_executable: true + executable: true aliases: true - name: Copy a Load Library from a USS directory /home/loadlib to a new PDSE @@ -761,7 +761,7 @@ src: '/home/loadlib/' dest: HLQ.LOADLIB.NEW remote_src: true - is_executable: true + executable: true aliases: true - name: Copy a file with ASA characters to a new sequential data set. @@ -986,8 +986,8 @@ class CopyHandler(object): def __init__( self, module, - is_binary=False, - is_executable=False, + binary=False, + executable=False, aliases=False, asa_text=False, backup_name=None, @@ -1005,10 +1005,10 @@ def __init__( Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether the file or data set to be copied contains binary data. - is_executable : bool + executable : bool Whether the file or data set to be copied is executable. asa_text : bool @@ -1030,10 +1030,10 @@ def __init__( module : AnsibleModule The AnsibleModule object from currently running module. - is_binary : bool + binary : bool Whether the file or data set to be copied contains binary data. - is_executable : bool + executable : bool Whether the file or data set to be copied is executable. asa_text : bool @@ -1051,8 +1051,8 @@ def __init__( High Level Qualifier for temporary datasets. """ self.module = module - self.is_binary = is_binary - self.is_executable = is_executable + self.binary = binary + self.executable = executable self.asa_text = asa_text self.aliases = aliases self.backup_name = backup_name @@ -1122,7 +1122,7 @@ def copy_to_seq( else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. 
- if self.is_binary or self.asa_text: + if self.binary or self.asa_text: copy_args["options"] = "-B" try: @@ -1189,7 +1189,7 @@ def copy_to_gdg(self, src, dest): copy_args = { "options": "" } - if self.is_binary or self.asa_text: + if self.binary or self.asa_text: copy_args["options"] = "-B" success = True @@ -1563,8 +1563,8 @@ class USSCopyHandler(CopyHandler): def __init__( self, module, - is_binary=False, - is_executable=False, + binary=False, + executable=False, asa_text=False, aliases=False, common_file_args=None, @@ -1584,7 +1584,7 @@ def __init__( common_file_args : dict Mode, group and owner information to be applied to destination file. - is_binary : bool + binary : bool Whether the file to be copied contains binary data. backup_name : str The USS path or data set name of destination backup. @@ -1599,8 +1599,8 @@ def __init__( """ super().__init__( module, - is_binary=is_binary, - is_executable=is_executable, + binary=binary, + executable=executable, asa_text=asa_text, aliases=aliases, backup_name=backup_name, @@ -1655,7 +1655,7 @@ def copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) - if self.is_executable: + if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) else: @@ -1732,15 +1732,15 @@ def _copy_to_file(self, src, dest, content_copy, conv_path): dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) new_src = conv_path or src try: - if self.is_binary: - copy.copy_uss_mvs(new_src, dest, is_binary=True) + if self.binary: + copy.copy_uss_mvs(new_src, dest, binary=True) else: opts = dict() opts["options"] = "" datasets.copy(new_src, dest, **opts) shutil.copystat(new_src, dest, follow_symlinks=True) # shutil.copy(new_src, dest) - if self.is_executable: + if self.executable: status = os.stat(dest) os.chmod(dest, status.st_mode | stat.S_IEXEC) except zoau_exceptions.ZOAUException as err: @@ -1953,7 +1953,7 @@ def _mvs_copy_to_uss( if self.asa_text: 
response = copy.copy_asa_mvs2uss(src, dest, tmphlq=self.tmphlq) rc = response.rc - elif self.is_executable: + elif self.executable: try: rc = datasets.copy(src, dest, alias=True, executable=True) except zoau_exceptions.ZOAUException as copy_exception: @@ -1978,7 +1978,7 @@ def _mvs_copy_to_uss( result = copy.copy_gdg2uss( src, dest, - is_binary=self.is_binary, + binary=self.binary, asa_text=self.asa_text ) @@ -1986,7 +1986,7 @@ def _mvs_copy_to_uss( raise CopyOperationError( msg=f"Error while copying GDG {src} to {dest}" ) - elif self.is_executable: + elif self.executable: try: datasets.copy(src, dest, alias=True, executable=True) except zoau_exceptions.ZOAUException as copy_exception: @@ -2010,7 +2010,7 @@ def _mvs_copy_to_uss( copy.copy_uss_mvs( src, dest, - is_binary=self.is_binary + binary=self.binary ) except CopyOperationError as err: raise err @@ -2022,8 +2022,8 @@ class PDSECopyHandler(CopyHandler): def __init__( self, module, - is_binary=False, - is_executable=False, + binary=False, + executable=False, aliases=False, asa_text=False, backup_name=None, @@ -2041,7 +2041,7 @@ def __init__( Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether the data set to be copied contains binary data. 
backup_name : str @@ -2051,8 +2051,8 @@ def __init__( """ super().__init__( module, - is_binary=is_binary, - is_executable=is_executable, + binary=binary, + executable=executable, aliases=aliases, asa_text=asa_text, backup_name=backup_name, @@ -2107,7 +2107,7 @@ def copy_to_pdse( path, dirs, files = next(os.walk(new_src)) src_members = [ - os.path.normpath("{0}/{1}".format(path, file)) if (self.is_binary or self.is_executable) + os.path.normpath("{0}/{1}".format(path, file)) if (self.binary or self.executable) else normalize_line_endings("{0}/{1}".format(path, file), encoding) for file in files ] @@ -2234,11 +2234,11 @@ def copy_to_member( else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. - if self.is_binary or self.asa_text: + if self.binary or self.asa_text: opts["options"] = "-B" try: - rc = datasets.copy(src, dest, alias=self.aliases, executable=self.is_executable, force=self.force, **opts) + rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, force=self.force, **opts) out = "" err = "" except zoau_exceptions.ZOAUException as copy_exception: @@ -2285,14 +2285,14 @@ def get_file_record_length(file): return max_line_length -def dump_data_set_member_to_file(data_set_member, is_binary): +def dump_data_set_member_to_file(data_set_member, binary): """Dumps a data set member into a file in USS. Parameters ---------- data_set_member : str Name of the data set member to dump. - is_binary : bool + binary : bool Whether the data set member contains binary data. 
Returns @@ -2309,7 +2309,7 @@ def dump_data_set_member_to_file(data_set_member, is_binary): os.close(fd) copy_args = dict() - if is_binary: + if binary: copy_args["options"] = "-B" response = datasets.copy(data_set_member, temp_path, **copy_args) @@ -2322,7 +2322,7 @@ def dump_data_set_member_to_file(data_set_member, is_binary): def get_data_set_attributes( name, size, - is_binary, + binary, asa_text=False, record_format=None, record_length=None, @@ -2349,7 +2349,7 @@ def get_data_set_attributes( Name of the new sequential data set. size : int Number of bytes needed for the new data set. - is_binary : bool + binary : bool Whether or not the data set will have binary data. asa_text : bool Whether the data set will have ASA control characters. @@ -2374,14 +2374,14 @@ def get_data_set_attributes( # set default value - record_format if record_format is None: - if is_binary: + if binary: record_format = "FB" else: record_format = "VB" # set default value - record_length if record_length is None: - if is_binary: + if binary: record_length = 80 else: record_length = 1028 @@ -2420,7 +2420,7 @@ def create_seq_dataset_from_file( file, dest, replace, - is_binary, + binary, asa_text, record_length=None, volume=None, @@ -2437,7 +2437,7 @@ def create_seq_dataset_from_file( Name of the data set. replace : bool Whether to replace an existing data set. - is_binary : bool + binary : bool Whether the file has binary data. asa_text bool Whether the file has ASA control characters. @@ -2458,7 +2458,7 @@ def create_seq_dataset_from_file( # When src is a binary file, the module will use default attributes # for the data set, such as a record format of "VB". 
- if not is_binary: + if not binary: record_format = "FB" if not record_length: record_length = get_file_record_length(file) @@ -2472,7 +2472,7 @@ def create_seq_dataset_from_file( dest_params = get_data_set_attributes( name=dest, size=src_size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, record_format=record_format, record_length=record_length, @@ -2523,7 +2523,7 @@ def is_compatible( src_member, is_src_dir, is_src_inline, - is_executable, + executable, asa_text, src_has_asa_chars, dest_has_asa_chars, @@ -2547,7 +2547,7 @@ def is_compatible( Whether the src is a USS directory. is_src_inline : bool Whether the src comes from inline content. - is_executable : bool + executable : bool Whether the src is a executable to be copied. asa_text : bool Whether the copy operation will handle ASA control characters. @@ -2577,7 +2577,7 @@ def is_compatible( # If source or destination is a sequential data set and executable as true # is incompatible to execute the copy. # ******************************************************************** - if is_executable: + if executable: if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: return False @@ -2851,7 +2851,7 @@ def get_attributes_of_any_dataset_created( src_ds_type, src, src_name, - is_binary, + binary, asa_text, volume=None ): @@ -2868,7 +2868,7 @@ def get_attributes_of_any_dataset_created( Name of the source data set, used as a model when appropiate. src_name : str Extraction of the source name without the member pattern. - is_binary : bool + binary : bool Whether the data set will contain binary data. asa_text : bool Whether the data set will contain ASA control characters. 
@@ -2888,7 +2888,7 @@ def get_attributes_of_any_dataset_created( params = get_data_set_attributes( dest, size=size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, volume=volume ) @@ -2897,7 +2897,7 @@ def get_attributes_of_any_dataset_created( params = get_data_set_attributes( dest, size=size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, volume=volume ) @@ -2907,7 +2907,7 @@ def get_attributes_of_any_dataset_created( params = get_data_set_attributes( dest, size=size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, volume=volume ) @@ -2921,8 +2921,8 @@ def allocate_destination_data_set( dest_ds_type, dest_exists, replace, - is_binary, - is_executable, + binary, + executable, asa_text, is_gds, is_active_gds, @@ -2948,9 +2948,9 @@ def allocate_destination_data_set( Whether the destination data set already exists. replace : bool Whether to replace an existent data set. - is_binary : bool + binary : bool Whether the data set will contain binary data. - is_executable : bool + executable : bool Whether the data to copy is an executable dataset or file. asa_text : bool Whether the data to copy has ASA control characters. @@ -3028,7 +3028,7 @@ def allocate_destination_data_set( if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, replace, is_binary, asa_text, volume=volume, tmphlq=tmphlq) + create_seq_dataset_from_file(src, dest, replace, binary, asa_text, volume=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: @@ -3042,12 +3042,12 @@ def allocate_destination_data_set( # size for the new data set. 
src_attributes = datasets.list_datasets(src_name)[0] record_length = int(src_attributes.record_length) - temp_dump = dump_data_set_member_to_file(src, is_binary) + temp_dump = dump_data_set_member_to_file(src, binary) create_seq_dataset_from_file( temp_dump, dest, replace, - is_binary, + binary, asa_text, record_length=record_length, volume=volume, @@ -3063,7 +3063,7 @@ def allocate_destination_data_set( if is_gds and not is_active_gds: data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=is_executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. @@ -3073,7 +3073,7 @@ def allocate_destination_data_set( dest_params = get_data_set_attributes( dest, size, - is_binary, + binary, asa_text, record_format=record_format, record_length=record_length, @@ -3088,7 +3088,7 @@ def allocate_destination_data_set( record_format = record_length = None type_ds = "PDSE" - if is_binary: + if binary: record_format = "FB" record_length = 80 else: @@ -3100,7 +3100,7 @@ def allocate_destination_data_set( if asa_text: record_length += 1 - if is_executable: + if executable: record_format = "U" record_length = 0 type_ds = "LIBRARY" @@ -3108,7 +3108,7 @@ def allocate_destination_data_set( dest_params = get_data_set_attributes( dest, size, - is_binary, + binary, asa_text, record_format=record_format, record_length=record_length, @@ -3120,9 +3120,9 @@ def allocate_destination_data_set( size = sum(os.stat("{0}/{1}".format(src, member)).st_size for member in os.listdir(src)) # This PDSE will be created with record format VB and a record length of 1028. 
- if is_executable: + if executable: dest_params = get_data_set_attributes( - dest, size, is_binary, + dest, size, binary, record_format='U', record_length=0, type="LIBRARY", @@ -3132,7 +3132,7 @@ def allocate_destination_data_set( dest_params = get_data_set_attributes( dest, size, - is_binary, + binary, asa_text, type="PDSE", volume=volume @@ -3173,7 +3173,7 @@ def allocate_destination_data_set( src_ds_type, src, src_name, - is_binary, + binary, asa_text, volume ) @@ -3392,8 +3392,8 @@ def run_module(module, arg_def): src = module.params.get('src') dest = module.params.get('dest') remote_src = module.params.get('remote_src') - is_binary = module.params.get('is_binary') - is_executable = module.params.get('is_executable') + binary = module.params.get('binary') + executable = module.params.get('executable') asa_text = module.params.get('asa_text') aliases = module.params.get('aliases') backup = module.params.get('backup') @@ -3518,7 +3518,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. - if not is_binary and not is_uss and not is_executable: + if not binary and not is_uss and not executable: new_src = src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). @@ -3544,7 +3544,7 @@ def run_module(module, arg_def): ) # Creating the handler just for tagging, we're not copying yet! 
- copy_handler = CopyHandler(module, is_binary=is_binary) + copy_handler = CopyHandler(module, binary=binary) copy_handler._tag_file_encoding(converted_src, "UTF-8") else: if (is_src_gds and data_set.DataSet.data_set_exists(src, tmphlq=tmphlq)) or ( @@ -3603,7 +3603,7 @@ def run_module(module, arg_def): # dest_data_set.type overrides `dest_ds_type` given precedence rules if dest_data_set and dest_data_set.get("type"): dest_ds_type = dest_data_set.get("type").upper() - elif is_executable: + elif executable: # When executable is selected and dest_exists is false means an executable PDSE was copied to remote, # so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. # Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED @@ -3670,7 +3670,7 @@ def run_module(module, arg_def): src_member, is_src_dir, (src_ds_type == "USS" and src is None), - is_executable, + executable, asa_text, src_has_asa_chars, dest_has_asa_chars, @@ -3707,11 +3707,11 @@ def run_module(module, arg_def): # Alias support is not avaiable to and from USS for text-based data sets. # ******************************************************************** if aliases: - if (src_ds_type == 'USS' or dest_ds_type == 'USS') and not is_executable: + if (src_ds_type == 'USS' or dest_ds_type == 'USS') and not executable: module.fail_json( msg="Alias support for text-based data sets is not available " + "for USS sources (src) or targets (dest). " - + "Try setting is_executable=True or aliases=False." + + "Try setting executable=True or aliases=False." 
) # ******************************************************************** @@ -3818,8 +3818,8 @@ def run_module(module, arg_def): dest_ds_type, dest_exists, replace, - is_binary, - is_executable, + binary, + executable, asa_text, is_dest_gds, is_dest_gds_active, @@ -3852,8 +3852,8 @@ def run_module(module, arg_def): # ******************************************************************** copy_handler = CopyHandler( module, - is_binary=is_binary, - is_executable=is_executable, + binary=binary, + executable=executable, asa_text=asa_text, backup_name=backup_name, force=force, @@ -3874,14 +3874,14 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- if is_uss: # Removing the carriage return characters - if src_ds_type == "USS" and not is_binary and not is_executable: + if src_ds_type == "USS" and not binary and not executable: new_src = conv_path or src if os.path.isfile(new_src): conv_path = copy_handler.remove_cr_endings(new_src) uss_copy_handler = USSCopyHandler( module, - is_binary=is_binary, - is_executable=is_executable, + binary=binary, + executable=executable, asa_text=asa_text, aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), @@ -3931,7 +3931,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_SEQ: # TODO: check how ASA behaves with this - if src_ds_type == "USS" and not is_binary: + if src_ds_type == "USS" and not binary: new_src = conv_path or src conv_path = normalize_line_endings(new_src, encoding) @@ -3951,8 +3951,8 @@ def run_module(module, arg_def): pdse_copy_handler = PDSECopyHandler( module, - is_binary=is_binary, - is_executable=is_executable, + binary=binary, + executable=executable, asa_text=asa_text, aliases=aliases, backup_name=backup_name, @@ -4014,8 +4014,8 @@ def main(): argument_spec=dict( src=dict(type='str'), dest=dict(required=True, type='str'), - 
is_binary=dict(type='bool', default=False), - is_executable=dict(type='bool', default=False), + binary=dict(type='bool', default=False), + executable=dict(type='bool', default=False), asa_text=dict(type='bool', default=False), aliases=dict(type='bool', default=False, required=False), identical_gdg_copy=dict(type='bool', default=False), @@ -4119,8 +4119,8 @@ def main(): arg_def = dict( src=dict(arg_type='data_set_or_path', required=False), dest=dict(arg_type='data_set_or_path', required=True), - is_binary=dict(arg_type='bool', required=False, default=False), - is_executable=dict(arg_type='bool', required=False, default=False), + binary=dict(arg_type='bool', required=False, default=False), + executable=dict(arg_type='bool', required=False, default=False), asa_text=dict(arg_type='bool', required=False, default=False), aliases=dict(arg_type='bool', required=False, default=False), identical_gdg_copy=dict(type='bool', default=False), @@ -4188,8 +4188,8 @@ def main(): if ( not module.params.get("encoding").get("to") and not module.params.get("remote_src") - and not module.params.get("is_binary") - and not module.params.get("is_executable") + and not module.params.get("binary") + and not module.params.get("executable") ): module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset() elif ( diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index cb270ab311..e0cf45186a 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -77,7 +77,7 @@ required: false default: "false" type: bool - is_binary: + binary: description: - Specifies if the file being fetched is a binary. required: false @@ -190,7 +190,7 @@ src: SOME.PDS.DATASET dest: /tmp/ flat: true - is_binary: true + binary: true - name: Fetch a UNIX file and don't validate its checksum zos_fetch: @@ -252,7 +252,7 @@ returned: success type: str sample: /tmp/SOME.DATA.SET -is_binary: +binary: description: Indicates the transfer mode that was used to fetch. 
returned: success type: bool @@ -531,7 +531,7 @@ def _copy_vsam_to_temp_data_set(self, ds_name): return out_ds_name - def _fetch_uss_file(self, src, is_binary, encoding=None): + def _fetch_uss_file(self, src, binary, encoding=None): """Convert encoding of a USS file. Return a tuple of temporary file name containing converted data. @@ -539,7 +539,7 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): ---------- src : str Source of the file. - is_binary : bool + binary : bool If is binary. encoding : str The file encoding. @@ -555,7 +555,7 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): Any exception ocurred while converting encoding. """ file_path = None - if (not is_binary) and encoding: + if (not binary) and encoding: fd, file_path = tempfile.mkstemp() from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -579,7 +579,7 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): return file_path if file_path else src - def _fetch_vsam(self, src, is_binary, encoding=None): + def _fetch_vsam(self, src, binary, encoding=None): """Copy the contents of a VSAM to a sequential data set. Afterwards, copy that data set to a USS file. @@ -587,7 +587,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): ---------- src : str Source of the file. - is_binary : bool + binary : bool If is binary. encoding : str The file encoding. @@ -603,7 +603,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): Unable to delete temporary dataset. 
""" temp_ds = self._copy_vsam_to_temp_data_set(src) - file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding=encoding) + file_path = self._fetch_mvs_data(temp_ds, binary, encoding=encoding) rc = datasets.delete(temp_ds) if rc != 0: os.remove(file_path) @@ -613,7 +613,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): return file_path - def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): + def _fetch_pdse(self, src, binary, temp_dir=None, encoding=None): """Copy a partitioned data set to a USS directory. If the data set is not being fetched in binary mode, encoding for all members inside the data set will be converted. @@ -622,7 +622,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): ---------- src : str Source of the dataset. - is_binary : bool + binary : bool If it is binary. temp_dir : str Parent directory for the temp directory of the copy. @@ -647,7 +647,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): "options": "" } - if is_binary: + if binary: copy_args["options"] = "-B" try: @@ -667,7 +667,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): stderr_lines=copy_exception.response.stderr_response.splitlines(), ) - if (not is_binary) and encoding: + if (not binary) and encoding: enc_utils = encode.EncodeUtils() from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -690,7 +690,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): ) return dir_path - def _fetch_gdg(self, src, is_binary, encoding=None): + def _fetch_gdg(self, src, binary, encoding=None): """Copy a generation data group to a USS directory. If the data set is not being fetched in binary mode, encoding for all data sets inside the GDG will be converted. @@ -699,7 +699,7 @@ def _fetch_gdg(self, src, is_binary, encoding=None): ---------- src : str Source of the generation data group. - is_binary : bool + binary : bool If it is binary. 
encoding : str The file encoding. @@ -723,7 +723,7 @@ def _fetch_gdg(self, src, is_binary, encoding=None): if current_gds.organization in data_set.DataSet.MVS_SEQ: self._fetch_mvs_data( current_gds.name, - is_binary, + binary, temp_dir=dir_path, file_override=current_gds.name, encoding=encoding @@ -731,14 +731,14 @@ def _fetch_gdg(self, src, is_binary, encoding=None): elif current_gds.organization in data_set.DataSet.MVS_PARTITIONED: self._fetch_pdse( current_gds.name, - is_binary, + binary, temp_dir=dir_path, encoding=encoding ) return dir_path - def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, encoding=None): + def _fetch_mvs_data(self, src, binary, temp_dir=None, file_override=None, encoding=None): """Copy a sequential data set or a partitioned data set member to a USS file. @@ -746,7 +746,7 @@ def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, enc ---------- src : str Source of the dataset. - is_binary : bool + binary : bool If it is binary. temp_dir : str Parent directory for the temp directory of the copy. 
@@ -781,7 +781,7 @@ def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, enc "options": "" } - if is_binary: + if binary: copy_args["options"] = "-B" try: @@ -798,7 +798,7 @@ def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, enc stderr_lines=copy_exception.response.stderr_response.splitlines(), ) - if (not is_binary) and encoding: + if (not binary) and encoding: enc_utils = encode.EncodeUtils() from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -846,7 +846,7 @@ def run_module(): dest=dict(required=True, type="path"), fail_on_missing=dict(required=False, default=True, type="bool"), flat=dict(required=False, default=False, type="bool"), - is_binary=dict(required=False, default=False, type="bool"), + binary=dict(required=False, default=False, type="bool"), use_qualifier=dict(required=False, default=False, type="bool"), validate_checksum=dict(required=False, default=True, type="bool"), encoding=dict(required=False, type="dict"), @@ -869,12 +869,12 @@ def run_module(): src=dict(arg_type="data_set_or_path", required=True), dest=dict(arg_type="path", required=True), fail_on_missing=dict(arg_type="bool", required=False, default=True), - is_binary=dict(arg_type="bool", required=False, default=False), + binary=dict(arg_type="bool", required=False, default=False), use_qualifier=dict(arg_type="bool", required=False, default=False), tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), ) - if not module.params.get("encoding").get("from") and not module.params.get("is_binary"): + if not module.params.get("encoding").get("from") and not module.params.get("binary"): mvs_src = data_set.is_data_set(src) remote_charset = encode.Defaults.get_default_system_charset() @@ -911,7 +911,7 @@ def run_module(): src = parsed_args.get("src") b_src = to_bytes(src) fail_on_missing = boolean(parsed_args.get("fail_on_missing")) - is_binary = boolean(parsed_args.get("is_binary")) + binary = 
boolean(parsed_args.get("binary")) encoding = module.params.get("encoding") tmphlq = module.params.get("tmp_hlq") @@ -922,7 +922,7 @@ def run_module(): result = dict( src=src, dest="", - is_binary=is_binary, + binary=binary, checksum="", changed=False, data_set_type="", @@ -1001,7 +1001,7 @@ def run_module(): if ds_type in data_set.DataSet.MVS_SEQ: file_path = fetch_handler._fetch_mvs_data( src_data_set.name, - is_binary, + binary, encoding=encoding ) result["remote_path"] = file_path @@ -1014,14 +1014,14 @@ def run_module(): if is_member: file_path = fetch_handler._fetch_mvs_data( src_data_set.name, - is_binary, + binary, encoding=encoding ) result["remote_path"] = file_path else: result["remote_path"] = fetch_handler._fetch_pdse( src_data_set.name, - is_binary, + binary, encoding=encoding ) @@ -1036,7 +1036,7 @@ def run_module(): ) file_path = fetch_handler._fetch_uss_file( src, - is_binary, + binary, encoding=encoding ) result["remote_path"] = file_path @@ -1048,7 +1048,7 @@ def run_module(): elif ds_type in data_set.DataSet.MVS_VSAM: file_path = fetch_handler._fetch_vsam( src_data_set.name, - is_binary, + binary, encoding=encoding ) result["remote_path"] = file_path @@ -1060,7 +1060,7 @@ def run_module(): elif ds_type == "GDG": result["remote_path"] = fetch_handler._fetch_gdg( src_data_set.name, - is_binary, + binary, encoding=encoding ) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index dfa45fa6a8..098db6c67d 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -475,12 +475,12 @@ def generate_executable_uss(hosts, dir, src, src_jcl_call): @pytest.mark.uss @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=False, is_remote=False), - 
dict(src="Example inline content", is_file=False, is_binary=True, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=True), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=False), + dict(src="Example inline content", is_file=False, binary=False, is_remote=False), + dict(src="Example inline content", is_file=False, binary=True, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=True), ]) def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): hosts = ansible_zos_module @@ -490,9 +490,9 @@ def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): hosts.all.file(path=dest_path, state="absent") if src["is_file"]: - copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, is_binary=src["is_binary"], remote_src=src["is_remote"]) + copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, binary=src["binary"], remote_src=src["is_remote"]) else: - copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, is_binary=src["is_binary"]) + copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, binary=src["binary"]) stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): @@ -554,10 +554,10 @@ def test_copy_file_to_existing_uss_file(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_binary=False, is_remote=False), - dict(src="/etc/profile", is_binary=True, is_remote=False), - dict(src="/etc/profile", is_binary=False, is_remote=True), - dict(src="/etc/profile", is_binary=True, is_remote=True), + dict(src="/etc/profile", binary=False, is_remote=False), + dict(src="/etc/profile", binary=True, is_remote=False), + dict(src="/etc/profile", binary=False, 
is_remote=True), + dict(src="/etc/profile", binary=True, is_remote=True), ]) def test_copy_file_to_uss_dir(ansible_zos_module, src): hosts = ansible_zos_module @@ -567,7 +567,7 @@ def test_copy_file_to_uss_dir(ansible_zos_module, src): try: - copy_res = hosts.all.zos_copy(src=src["src"], dest=dest, is_binary=src["is_binary"], remote_src=src["is_remote"]) + copy_res = hosts.all.zos_copy(src=src["src"], dest=dest, binary=src["binary"], remote_src=src["is_remote"]) stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): @@ -2550,7 +2550,7 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): src=src, dest=dest, remote_src=False, - is_binary=False + binary=False ) verify_copy = hosts.all.shell( @@ -2605,7 +2605,7 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): src=src, dest=dest, remote_src=False, - is_binary=False + binary=False ) verify_copy = hosts.all.shell( @@ -2664,7 +2664,7 @@ def test_copy_file_crlf_endings_and_pound_to_seq_data_set(ansible_zos_module): "to": "IBM-285" }, remote_src=False, - is_binary=False + binary=False ) verify_copy = hosts.all.shell( @@ -2722,7 +2722,7 @@ def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): src=src, dest=dest, remote_src=False, - is_binary=True + binary=True ) for cp_res in copy_result.contacted.values(): @@ -2766,7 +2766,7 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) src=src, dest=dest, remote_src=True, - is_binary=True + binary=True ) for cp_res in copy_result.contacted.values(): @@ -2783,12 +2783,12 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=False, 
is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=True, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=True), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=False), + dict(src="Example inline content", is_file=False, binary=False, is_remote=False), + dict(src="Example inline content", is_file=False, binary=True, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=True), ]) def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module @@ -2798,9 +2798,9 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, state="absent") if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], is_binary=src["is_binary"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], binary=src["binary"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], is_binary=src["is_binary"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], binary=src["binary"]) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), @@ -3151,12 +3151,12 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=False, is_remote=False), - dict(src="Example inline content", 
is_file=False, is_binary=True, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=True), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=False), + dict(src="Example inline content", is_file=False, binary=False, is_remote=False), + dict(src="Example inline content", is_file=False, binary=True, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=True), ]) def test_copy_file_to_non_existing_member(ansible_zos_module, src): hosts = ansible_zos_module @@ -3175,9 +3175,9 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): ) if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, is_binary=src["is_binary"], remote_src=src["is_remote"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, binary=src["binary"], remote_src=src["is_remote"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, is_binary=src["is_binary"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, binary=src["binary"]) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), @@ -3294,12 +3294,12 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + dict(type="seq", binary=False), + dict(type="seq", binary=True), + dict(type="pds", binary=False), + dict(type="pds", binary=True), + dict(type="pdse", binary=False), + dict(type="pdse", binary=True) ]) def 
test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module @@ -3319,7 +3319,7 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): ) hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) + copy_result = hosts.all.zos_copy(src=src, dest=dest, binary=args["binary"], remote_src=True) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), @@ -3691,7 +3691,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib, pgm_mem), remote_src=True, - is_executable=True, + executable=True, aliases=False ) # zos_copy w an executables and its alias: @@ -3699,7 +3699,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib_aliases, pgm_mem), remote_src=True, - is_executable=True, + executable=True, aliases=True ) @@ -3823,7 +3823,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): src="{0}({1})".format(src_lib, pgm_mem), dest=uss_dest, remote_src=True, - is_executable=True, + executable=True, replace=True) for result in copy_uss_res.contacted.values(): assert result.get("msg") is None @@ -3846,7 +3846,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): src="{0}".format(uss_dest), dest="{0}({1})".format(dest_lib, pgm_mem), remote_src=True, - is_executable=True, + executable=True, aliases=False ) # zos_copy from USS file w an executables and its alias: @@ -3854,7 +3854,7 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): src="{0}".format(uss_dest), dest="{0}({1})".format(dest_lib_aliases, pgm_mem), remote_src=True, - is_executable=True, + executable=True, aliases=True ) @@ -3998,7 +3998,7 @@ def 
test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib), remote_src=True, - is_executable=True, + executable=True, aliases=False, dest_data_set={ 'type': "library", @@ -4014,7 +4014,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib_aliases), remote_src=True, - is_executable=True, + executable=True, aliases=True, dest_data_set={ 'type': "library", @@ -4032,7 +4032,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib), remote_src=True, - is_executable=True, + executable=True, aliases=False ) # copy src loadlib to dest library pds w aliases @@ -4040,7 +4040,7 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): src="{0}".format(src_lib), dest="{0}".format(dest_lib_aliases), remote_src=True, - is_executable=True, + executable=True, aliases=True ) @@ -4213,7 +4213,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): copy_res = hosts.all.zos_copy( src=source_path, dest="{0}".format(dest_lib), - is_executable=True, + executable=True, aliases=False, dest_data_set={ 'type': "pdse", @@ -4229,7 +4229,7 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): copy_res = hosts.all.zos_copy( src=source_path, dest="{0}".format(dest_lib), - is_executable=True, + executable=True, aliases=False ) @@ -4353,7 +4353,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): src="{0}".format(src_lib), dest="{0}".format(uss_dir_path), remote_src=True, - is_executable=True, + executable=True, ) for result in copy_res_uss.contacted.values(): assert result.get("msg") is None @@ -4391,7 +4391,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): src="{0}/{1}".format(uss_dir_path, src_lib.upper()), dest="{0}".format(dest_lib), remote_src=True, - 
is_executable=True, + executable=True, aliases=False ) # copy USS dir to dest library pds w aliases @@ -4399,7 +4399,7 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): src="{0}/{1}".format(uss_dir_path, src_lib.upper()), dest="{0}".format(dest_lib_aliases), remote_src=True, - is_executable=True, + executable=True, aliases=True ) @@ -4480,7 +4480,7 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): src=f"{c_dir}/hello_world", dest=dest_uss, remote_src=True, - is_executable=True, + executable=True, replace=True ) verify_exe_dst = hosts.all.shell(cmd=f"{c_dir}/hello_world_2") @@ -4527,7 +4527,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): src=f"{c_dir}/hello_world", dest="{0}({1})".format(dest, member), remote_src=True, - is_executable=True, + executable=True, replace=True ) cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" @@ -6382,7 +6382,7 @@ def test_copy_pdse_loadlib_to_pdse_loadlib_using_aliases(ansible_zos_module): src="{0}".format(src_lib_aliases), dest="{0}".format(dest_lib_aliases), remote_src=True, - is_executable=True, + executable=True, aliases=True, dest_data_set={ 'type': "library", @@ -6607,7 +6607,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib, dest_pgm_mem), remote_src=True, - is_executable=True, + executable=True, aliases=False ) # zos_copy w an executables and its alias: @@ -6615,7 +6615,7 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo src="{0}({1})".format(src_lib, pgm_mem), dest="{0}({1})".format(dest_lib_aliases, dest_pgm_mem), remote_src=True, - is_executable=True, + executable=True, aliases=True ) diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index e6673c136d..9752b309ae 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ 
b/tests/functional/modules/test_zos_fetch_func.py @@ -364,7 +364,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): "src":test_vsam, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -424,7 +424,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): "src":TEST_PDS_MEMBER, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } dest_path = "/tmp/" + extract_member_name(TEST_PDS_MEMBER) try: @@ -434,7 +434,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): assert result.get("data_set_type") == "Partitioned" assert result.get("module_stderr") is None assert result.get("dest") == dest_path - assert result.get("is_binary") is True + assert result.get("binary") is True assert os.path.exists(dest_path) assert os.path.isfile(dest_path) assert "msg" in result.keys() @@ -460,7 +460,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): "src":TEST_PS, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } dest_path = "/tmp/" + TEST_PS try: @@ -469,7 +469,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None - assert result.get("is_binary") is True + assert result.get("binary") is True assert os.path.exists(dest_path) assert "msg" in result.keys() assert result.get("src") is not None @@ -490,7 +490,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): "src":TEST_PDS, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } dest_path = "/tmp/" + TEST_PDS try: @@ -499,7 +499,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): assert result.get("changed") is True assert result.get("data_set_type") == "Partitioned" assert result.get("module_stderr") is 
None - assert result.get("is_binary") is True + assert result.get("binary") is True assert os.path.exists(dest_path) assert os.path.isdir(dest_path) assert "msg" in result.keys() diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index a47dba912b..6b9ba4e338 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -198,7 +198,7 @@ def test_basic_mount_with_bpx_nomarker_nobackup(ansible_zos_module, volumes_on_s hosts.all.zos_copy( content=INITIAL_PRM_MEMBER, dest=tmp_file_filename, - is_binary=True, + binary=True, ) hosts.all.shell( cmd="chtag -t -c ISO8859-1 " + tmp_file_filename, @@ -214,7 +214,7 @@ def test_basic_mount_with_bpx_nomarker_nobackup(ansible_zos_module, volumes_on_s hosts.all.zos_copy( src=tmp_file_filename, dest=dest_path, - is_binary=True, + binary=True, remote_src=True, ) @@ -271,7 +271,7 @@ def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on hosts.all.zos_copy( src=tmp_file_filename, dest=dest_path, - is_binary=True, + binary=True, remote_src=True, ) @@ -326,7 +326,7 @@ def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_syste hosts.all.zos_copy( content=INITIAL_PRM_MEMBER, dest=tmp_file_filename, - is_binary=True, + binary=True, ) # Make it readable at console hosts.all.shell( @@ -356,7 +356,7 @@ def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_syste hosts.all.zos_copy( src=tmp_file_filename, dest=dest_path, - is_binary=True, + binary=True, remote_src=True, ) @@ -381,7 +381,7 @@ def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_syste hosts.all.zos_copy( src=dest_path, dest=test_tmp_file_filename, - is_binary=True, + binary=True, remote_src=True, ) results = hosts.all.shell( diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 910e08eb56..1c97815549 100644 
--- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -1330,7 +1330,7 @@ def test_mvs_unarchive_single_data_set_remote_src( fetch_result = hosts.all.zos_fetch( src=mvs_dest_archive, dest=tmp_folder.name, - is_binary=True + binary=True ) for res in fetch_result.contacted.values(): From e39507bc2b59d29b910cac85cf1ced015bb01190 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 22 Sep 2025 12:39:10 +0530 Subject: [PATCH 57/73] Added msg in all error responses --- plugins/modules/zos_started_task.py | 50 +++++++++++++------ .../modules/test_zos_started_task_func.py | 47 ++++++++++++++--- 2 files changed, 75 insertions(+), 22 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index cf07cbb106..e7eb20f13a 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -92,6 +92,8 @@ while starting it. If job_name is not specified, then member_name is used as job_name. Otherwise, job_name is the started task job name used to find and apply the state selected. + - When state is displayed or modified or cancelled or stopped or forced, job_name is the + started task name. required: false type: str aliases: @@ -118,8 +120,8 @@ - member parameters: description: - - Program parameters passed to the started program, which might be a list in parentheses or - a string in single quotation marks + - Program parameters passed to the started program. + - Only applicable when state is started or modified otherwise ignored. required: false type: list elements: str @@ -166,6 +168,7 @@ - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + - Only applicable when state=started otherwise ignored. 
required: false type: str tcb_address: @@ -300,7 +303,7 @@ returned: failure or skipped type: str sample: - File /u/user/file.txt is already missing on the system, skipping script + Command parameters are invalid. rc: description: - The return code is 0 when command executed successfully. @@ -521,7 +524,7 @@ zoau_exceptions = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=1, **kwargs): +def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=0, **kwargs): """Execute operator command. Parameters @@ -545,19 +548,16 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s if execute_display_before: - task_params = execute_display_command(started_task_name, timeout_c) + task_params = execute_display_command(started_task_name) response = opercmd.execute(operator_cmd, timeout_c, **kwargs) - if execute_display_after: - task_params = execute_display_command(started_task_name, timeout_c) - rc = response.rc stdout = response.stdout_response stderr = response.stderr_response return rc, stdout, stderr, task_params -def execute_display_command(started_task_name, timeout_s): +def execute_display_command(started_task_name, timeout=0): """Execute operator display command. 
Parameters @@ -573,7 +573,7 @@ def execute_display_command(started_task_name, timeout_s): List contains extracted parameters from display command output of started task """ cmd = "d a," + started_task_name - display_response = opercmd.execute(cmd, timeout_s) + display_response = opercmd.execute(cmd, timeout) task_params = [] if display_response.rc == 0 and display_response.stderr_response == "": task_params = extract_keys(display_response.stdout_response) @@ -1299,12 +1299,24 @@ def run_module(): NON-CANCELABLE: When cancel command can't stop job and force command is needed. CANCELABLE: When force command used without using cancel command """ - start_errmsg = ['ERROR', 'INVALID PARAMETER'] + start_errmsg = ['JCL ERROR', 'INVALID PARAMETER', 'DELIMITER ERROR', 'ERROR'] stop_errmsg = ['NOT ACTIVE', 'INVALID PARAMETER'] display_errmsg = ['NOT ACTIVE', 'INVALID PARAMETER'] modify_errmsg = ['REJECTED', 'NOT ACTIVE', 'INVALID PARAMETER'] cancel_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'DUPLICATE NAME FOUND', 'NON-CANCELABLE'] force_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'CANCELABLE', 'DUPLICATE NAME FOUND'] + error_details = { + 'JCL ERROR': 'Member is missing in PROCLIB or JCL is invalid or issue with JCL execution.', + 'INVALID PARAMETER': 'Command parameters are invalid.', + 'DELIMITER ERROR': 'Command parameters are invalid.', + 'ERROR': 'Member is missing in PROCLIB or JCL is invalid or issue with JCL execution.', + 'NOT ACTIVE': 'Started task is not active', + 'REJECTED': 'Started task is not accepting modification.', + 'NOT LOGGED ON': 'TSO user session is not active.', + 'DUPLICATE NAME FOUND': 'Multiple started tasks are running with same name.', + 'NON-CANCELABLE': 'Started task can not be cancelled.', + 'CANCELABLE': 'Started task should be cancelled.' 
+ } err_msg = [] kwargs = {} @@ -1346,13 +1358,18 @@ def run_module(): rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, timeout_s=wait_time_s, **kwargs) isFailed = False system_logs = "" - if err != "" or any(msg in out for msg in err_msg): + msg = "" + found_msg = next((msg for msg in err_msg if msg in out), None) + if err != "" or found_msg: isFailed = True # Fetch system logs to validate any error occured in execution if not isFailed or verbose: system_logs = fetch_logs(cmd.upper(), wait_time_s) - if any(msg in system_logs for msg in err_msg): - isFailed = True + # If sysout is not having error, then check system log as well to make sure no error occured + if not isFailed: + found_msg = next((msg for msg in err_msg if msg in system_logs), None) + if found_msg: + isFailed = True if not verbose: system_logs = "" current_state = "" @@ -1360,6 +1377,7 @@ def run_module(): if rc == 0: rc = 1 changed = False + msg = error_details[found_msg] stdout = out stderr = err if err == "" or err is None: @@ -1372,6 +1390,8 @@ def run_module(): stderr = err if state == "displayed": task_params = extract_keys(out) + elif execute_display_after: + task_params = execute_display_command(started_task_name) result = dict() @@ -1390,6 +1410,8 @@ def run_module(): stderr_lines=stderr.split('\n'), verbose_output=system_logs ) + if msg: + result['msg'] = msg module.exit_json(**result) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 6d922fa021..39c49a8d51 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -42,6 +42,7 @@ # Input arguments validation def test_start_task_with_invalid_member(ansible_zos_module): hosts = ansible_zos_module + # Check with non-existing member start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMTASK" @@ -49,7 
+50,8 @@ def test_start_task_with_invalid_member(ansible_zos_module): for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None - + assert result.get("msg") is not None + # Validating with member name more than 8 chars start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLETASK" @@ -61,6 +63,7 @@ def test_start_task_with_invalid_member(ansible_zos_module): def test_start_task_with_jobname_identifier(ansible_zos_module): hosts = ansible_zos_module + # validate jobname and identifier with non-existing member start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLE", @@ -74,17 +77,19 @@ def test_start_task_with_jobname_identifier(ansible_zos_module): def test_start_task_with_invalid_identifier(ansible_zos_module): hosts = ansible_zos_module + # validate using invalid identifier start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPTASK", identifier = "$HELLO" ) - for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("failed") is True assert result.get("stderr") is not None - + assert result.get("msg") is not None + + # validate using proper identifier and non-existing member start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLE", @@ -94,16 +99,17 @@ def test_start_task_with_invalid_identifier(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE.HELLO" + assert result.get("msg") is not None def test_start_task_with_invalid_jobaccount(ansible_zos_module): hosts = ansible_zos_module job_account = "(T043JM,JM00,1,0,0,This is the invalid job account information to test negative scenario)" + # validate invalid job_account with non-existing member start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLE", job_account = job_account ) - 
for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("failed") is True @@ -111,12 +117,12 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): def test_start_task_with_invalid_devicenum(ansible_zos_module): hosts = ansible_zos_module + # validate invalid devicenum with non-existing member start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLE", device_number = "0870" ) - for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("failed") is True @@ -129,11 +135,11 @@ def test_start_task_with_invalid_volumeserial(ansible_zos_module): member_name = "SAMPLE", volume_serial = "12345A" ) - for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE,,12345A" + assert result.get("msg") is not None def test_start_task_with_invalid_parameters(ansible_zos_module): hosts = ansible_zos_module @@ -142,11 +148,11 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): member_name = "SAMPLE", parameters = ["KEY1"] ) - for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE,,,'KEY1'" + assert result.get("msg") is not None start_results = hosts.all.zos_started_task( state = "started", @@ -154,11 +160,11 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): parameters = ["KEY1", "KEY2", "KEY3"], volume_serial = "123456" ) - for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "S SAMPLE,,123456,(KEY1,KEY2,KEY3)" + assert result.get("msg") is not None def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): hosts = ansible_zos_module @@ -200,6 +206,7 @@ def 
test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None + start_results = hosts.all.zos_started_task( state = "started", member_name = "VLF", @@ -211,6 +218,7 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("failed") is True assert result.get("msg") is not None + start_results = hosts.all.zos_started_task( state = "started", member_name = "VLF", @@ -223,6 +231,8 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == 'S VLF,KEY1=VALUE1,KEY2=VALUE2' + assert result.get("msg") is not None + assert result.get("verbose_output") == "" def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): @@ -236,6 +246,8 @@ def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == 'S SAMPLE,/ABCD' + assert result.get("msg") is not None + assert result.get("verbose_output") == "" def test_display_task_negative(ansible_zos_module): hosts = ansible_zos_module @@ -259,6 +271,7 @@ def test_stop_task_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("failed") is True assert result.get("stderr") is not None + assert result.get("msg") is not None stop_results = hosts.all.zos_started_task( state = "stopped", @@ -269,6 +282,7 @@ def test_stop_task_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "P TESTER.SAMPLE" + assert result.get("msg") is not None def test_modify_task_negative(ansible_zos_module): hosts = ansible_zos_module @@ -300,6 +314,7 @@ def 
test_modify_task_negative(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert result.get("cmd") == "F TESTER.SAMPLE,REPLACE,VX=10" + assert result.get("msg") is not None def test_cancel_task_negative(ansible_zos_module): hosts = ansible_zos_module @@ -322,6 +337,8 @@ def test_cancel_task_negative(ansible_zos_module): assert result.get("stderr") is not None assert result.get("cmd") == "C TESTER.SAMPLE" assert result.get("verbose_output") == "" + assert result.get("msg") is not None + cancel_results = hosts.all.zos_started_task( state = "cancelled", asid = "0012", @@ -334,6 +351,7 @@ def test_cancel_task_negative(ansible_zos_module): assert result.get("stderr") is not None assert result.get("cmd") == "C U=OMVSTEST,A=0012,DUMP" assert result.get("verbose_output") != "" + assert result.get("msg") is not None cancel_results = hosts.all.zos_started_task( state = "cancelled", userid = "OMVSADM", @@ -456,7 +474,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): for result in force_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None + assert len(result.get("tasks")) > 0 assert result.get("cmd") == "FORCE SAMPLE" + assert result.get("msg") is not None assert "CANCELABLE - ISSUE CANCEL BEFORE FORCE" in result.get("stderr") stop_results = hosts.all.zos_started_task( @@ -587,6 +607,7 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): for result in start_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" stop_results = hosts.all.zos_started_task( @@ -597,6 +618,7 @@ def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): for result in stop_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 assert 
result.get("stderr") == "" finally: @@ -618,6 +640,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): for result in modify_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" assert result.get("cmd") == "F VLF,REPLACE,NN=00" @@ -643,6 +666,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): for result in stop_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" assert result.get("cmd") == f"P VLF,A={asid_val}" @@ -655,6 +679,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): for result in start_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" modify_results = hosts.all.zos_started_task( @@ -667,6 +692,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 assert result.get("cmd") == "F VLF.TESTER,REPLACE,NN=00" stop_results = hosts.all.zos_started_task( @@ -678,6 +704,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 start_results = hosts.all.zos_started_task( state = "started", @@ -686,6 +713,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): ) for result in start_results.contacted.values(): assert result.get("changed") is True + assert len(result.get("tasks")) > 0 assert result.get("rc") == 0 assert result.get("stderr") == "" @@ -720,6 +748,7 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): assert result.get("changed") is True assert 
result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 assert result.get("verbose_output") != "" stop_results = hosts.all.zos_started_task( @@ -731,6 +760,7 @@ def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 finally: hosts.all.file(path=temp_path, state="absent") @@ -762,6 +792,7 @@ def test_force_and_start_with_icsf_task(ansible_zos_module): assert result.get("changed") is False assert result.get("rc") == 1 assert result.get("stderr") != "" + assert len(result.get("tasks")) > 0 asid = result.get("tasks")[0].get("asid") force_results = hosts.all.zos_started_task( From 9b459efad63637c56bf4d38083eab93cd00bf613 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 22 Sep 2025 14:24:05 +0530 Subject: [PATCH 58/73] Replacing error strings with error codes --- plugins/modules/zos_started_task.py | 43 +++++++++++++++-------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index e7eb20f13a..f5ae913805 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -1290,32 +1290,33 @@ def run_module(): Below error messages are used to detrmine if response has any error.When response could have any of below error message has explained below. - ERROR: Response contains this keyword when JCL contains syntax error. - INVALID PARAMETER: When invalid parameter passed in command line. - NOT ACTIVE: When started task with the given job name is not active + JCL ERROR - IEE122I: Response contains this keyword when JCL contains syntax error. + INVALID PARAMETER - IEE535I: When invalid parameter passed in command line. 
+ NOT ACTIVE - IEE341I: When started task with the given job name is not active REJECTED: When modify command is not supported by respective started task. - NOT LOGGED ON: When invalid userid passed in command. - DUPLICATE NAME FOUND: When multiple started tasks exist with same name. - NON-CANCELABLE: When cancel command can't stop job and force command is needed. - CANCELABLE: When force command used without using cancel command + NOT LOGGED ON - IEE324I: When invalid userid passed in command. + DUPLICATE NAME FOUND - IEE842I: When multiple started tasks exist with same name. + NON-CANCELABLE - IEE838I: When cancel command can't stop job and force command is needed. + CANCELABLE - IEE838I: When force command used without using cancel command """ - start_errmsg = ['JCL ERROR', 'INVALID PARAMETER', 'DELIMITER ERROR', 'ERROR'] - stop_errmsg = ['NOT ACTIVE', 'INVALID PARAMETER'] - display_errmsg = ['NOT ACTIVE', 'INVALID PARAMETER'] - modify_errmsg = ['REJECTED', 'NOT ACTIVE', 'INVALID PARAMETER'] - cancel_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'DUPLICATE NAME FOUND', 'NON-CANCELABLE'] - force_errmsg = ['NOT ACTIVE', 'NOT LOGGED ON', 'INVALID PARAMETER', 'CANCELABLE', 'DUPLICATE NAME FOUND'] + start_errmsg = ['IEE122I', 'IEE535I', 'IEE307I', 'ERROR'] + stop_errmsg = ['IEE341I', 'IEE535I'] + display_errmsg = ['IEE341I', 'IEE535I', 'NOT FOUND'] + modify_errmsg = ['REJECTED', 'IEE341I', 'IEE535I', 'IEE311I'] + cancel_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'IEE842I', 'NON-CANCELABLE'] + force_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'CANCELABLE', 'IEE842I'] error_details = { - 'JCL ERROR': 'Member is missing in PROCLIB or JCL is invalid or issue with JCL execution.', - 'INVALID PARAMETER': 'Command parameters are invalid.', - 'DELIMITER ERROR': 'Command parameters are invalid.', + 'IEE122I': 'Specified member is missing or PROC/JOB contains incorrect JCL statements.', + 'IEE535I': 'A parameter on a command is not valid.', + 'IEE307I': 'Command 
parameter punctuation is incorrect or parameter is not followed by a blank.', 'ERROR': 'Member is missing in PROCLIB or JCL is invalid or issue with JCL execution.', - 'NOT ACTIVE': 'Started task is not active', + 'IEE341I': 'Started task is not active', 'REJECTED': 'Started task is not accepting modification.', - 'NOT LOGGED ON': 'TSO user session is not active.', - 'DUPLICATE NAME FOUND': 'Multiple started tasks are running with same name.', - 'NON-CANCELABLE': 'Started task can not be cancelled.', - 'CANCELABLE': 'Started task should be cancelled.' + 'IEE324I': 'The userid specified on the command is not currently active in the system..', + 'IEE842I': 'More than one active job with the specified name exist.', + 'NON-CANCELABLE': 'The task cannot be canceled. Use the FORCE ARM command.', + 'CANCELABLE': 'The task can be canceled. Use the CANCEL command.', + 'IEE311I': 'Required parameter is missing.' } err_msg = [] kwargs = {} From d40565e5b056ec8c66722d90556421d60f8d88a0 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 22 Sep 2025 17:15:26 +0530 Subject: [PATCH 59/73] Updating documentation --- plugins/modules/zos_started_task.py | 171 +++++++++++++++------------- 1 file changed, 89 insertions(+), 82 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index f5ae913805..4bff7dda68 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -30,6 +30,7 @@ arm: description: - I(arm) indicates to execute normal task termination routines without causing address space destruction. + - Only applicable when state is forced, otherwise is ignored. required: false type: bool armrestart: @@ -45,14 +46,13 @@ description: - When state is cancelled or stopped or forced, asid is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. 
- - When state=displayed, asid is the hexadecimal address space identifier of the work unit of - the task you get details from. + - Only applicable when state is stopped or cancelled or forced, otherwise is ignored. required: false type: str device_type: description: - Option device_type is the type of the output device (if any) associated with the task. - - Only applicable when state=started otherwise ignored. + - Only applicable when state is started otherwise ignored. required: false type: str device_number: @@ -67,7 +67,7 @@ description: - A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. - - Only applicable when state=cancelled otherwise ignored. + - Only applicable when state is cancelled otherwise ignored. required: false type: bool identifier_name: @@ -83,7 +83,7 @@ - Option job_account specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. - - Only applicable when state=started otherwise ignored. + - Only applicable when state is started otherwise ignored. required: false type: str job_name: @@ -105,7 +105,7 @@ - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. - - Only applicable when state=started otherwise ignored. + - Only applicable when state is started otherwise ignored. required: false type: dict member_name: @@ -113,7 +113,7 @@ - Option member_name is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. - - Only applicable when state=started otherwise ignored. 
+ - Only applicable when state is started otherwise ignored. required: false type: str aliases: @@ -128,7 +128,7 @@ retry: description: - I(retry) is applicable for only FORCE TCB. - - Only applicable when state=forced otherwise ignored. + - Only applicable when state is forced otherwise ignored. required: false type: str choices: @@ -139,7 +139,7 @@ - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. - - Only applicable when state=started otherwise ignored. + - Only applicable when state is started otherwise ignored. required: false type: str choices: @@ -168,26 +168,26 @@ - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - - Only applicable when state=started otherwise ignored. + - Only applicable when state is started otherwise ignored. required: false type: str tcb_address: description: - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. - - Only applicable when state=forced otherwise ignored. + - Only applicable when state is forced otherwise ignored. required: false type: str volume_serial: description: - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. - - Only applicable when state=started otherwise ignored. + - Only applicable when state is started otherwise ignored. required: false type: str userid: description: - The user ID of the time-sharing user you want to cancel or force. - - Only applicable when state=cancelled or state=forced , otherwise ignored. + - Only applicable when state is cancelled or forced, otherwise ignored. 
required: false type: str verbose: @@ -290,108 +290,114 @@ RETURN = r""" changed: description: - True if the state was changed, otherwise False. + - True if the state was changed, otherwise False. returned: always type: bool cmd: - description: Command executed via opercmd. - returned: changed - type: str - sample: S SAMPLE + description: + - Command executed via opercmd. + returned: changed + type: str + sample: S SAMPLE msg: - description: Failure or skip message returned by the module. - returned: failure or skipped - type: str - sample: - Command parameters are invalid. + description: + - Failure or skip message returned by the module. + returned: failure or skipped + type: str + sample: Command parameters are invalid. rc: - description: + description: - The return code is 0 when command executed successfully. - The return code is 1 when opercmd throws any error. - The return code is 5 when any parameter validation failed. - returned: changed - type: int - sample: 0 + returned: changed + type: int + sample: 0 state: - description: The final state of the started task, after execution.. - returned: changed - type: str - sample: S SAMPLE + description: + - The final state of the started task, after execution. + returned: changed + type: str + sample: S SAMPLE stderr: - description: The STDERR from the command, may be empty. - returned: changed - type: str - sample: An error has ocurred. + description: + - The STDERR from the command, may be empty. + returned: changed + type: str + sample: An error has occurred. stderr_lines: - description: List of strings containing individual lines from STDERR. - returned: changed - type: list - sample: ["An error has ocurred"] + description: + - List of strings containing individual lines from STDERR. + returned: changed + type: list + sample: ["An error has occurred"] stdout: - description: The STDOUT from the command, may be empty. - returned: changed - type: str - sample: ISF031I CONSOLE OMVS0000 ACTIVATED. 
+ description: + - The STDOUT from the command, may be empty. + returned: changed + type: str + sample: ISF031I CONSOLE OMVS0000 ACTIVATED. stdout_lines: - description: List of strings containing individual lines from STDOUT. - returned: changed - type: list - sample: ["Allocation to SYSEXEC completed."] + description: + - List of strings containing individual lines from STDOUT. + returned: changed + type: list + sample: ["Allocation to SYSEXEC completed."] tasks: description: - The output information for a list of started tasks matching specified criteria. - If no started task is found then this will return empty. + - The output information for a list of started tasks matching specified criteria. + - If no started task is found then this will return empty. returned: success type: list elements: dict contains: address_space_second_table_entry: description: - The control block used to manage memory for a started task + - The control block used to manage memory for a started task type: str sample: 03E78500 affinity: description: - The identifier of the processor, for up to any four processors, if the job requires the services of specific processors. - affinity=NONE means the job can run on any processor. + - The identifier of the processor, for up to any four processors, if the job requires the services of specific processors. + - affinity=NONE means the job can run on any processor. type: str sample: NONE asid: description: - Address space identifier (ASID), in hexadecimal. + - Address space identifier (ASID), in hexadecimal. type: str sample: 0054 cpu_time: description: - The processor time used by the address space, including the initiator. This time does not include SRB time. - cpu_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. 
- sss.tttS when time is less than 1000 seconds - hh.mm.ss when time is at least 1000 seconds, but less than 100 hours - hhhhh.mm when time is at least 100 hours - ******** when time exceeds 100000 hours - NOTAVAIL when the TOD clock is not working + - The processor time used by the address space, including the initiator. This time does not include SRB time. + - cpu_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. + sss.tttS when time is less than 1000 seconds + hh.mm.ss when time is at least 1000 seconds, but less than 100 hours + hhhhh.mm when time is at least 100 hours + ******** when time exceeds 100000 hours + NOTAVAIL when the TOD clock is not working type: str sample: 000.008S dataspaces: description: - The started task dataspaces details. + - The started task dataspaces details. returned: success type: list elements: dict contains: data_space_address_entry: description: - Central address of the data space ASTE. + - Central address of the data space ASTE. type: str sample: 058F2180 dataspace_name: description: - Data space name associated with the address space. + - Data space name associated with the address space. type: str sample: CIRRGMAP domain_number: description: - domain_number=N/A if the system is operating in goal mode. + - domain_number=N/A if the system is operating in goal mode. type: str sample: N/A elapsed_time: @@ -409,7 +415,7 @@ sample: 812.983S priority: description: - The priority of a started task is determined by the Workload Manager (WLM), based on the service class and importance assigned to it. + - The priority of a started task is determined by the Workload Manager (WLM), based on the service class and importance assigned to it. type: str sample: 1 proc_step_name: @@ -422,45 +428,45 @@ sample: VLF program_event_recording: description: - YES if A PER trap is active in the address space. - NO if No PER trap is active in the address space. 
+ - YES if A PER trap is active in the address space. + - NO if No PER trap is active in the address space. type: str sample: NO program_name: description: - program_name=N/A if the system is operating in goal mode. + - program_name=N/A if the system is operating in goal mode. type: str sample: N/A queue_scan_count: description: - YES if the address space has been quiesced. - NO if the address space is not quiesced. + - YES if the address space has been quiesced. + - NO if the address space is not quiesced. type: str sample: NO resource_group: description: - The name of the resource group currently associated the service class. It can also be N/A if there is no resource group association. + - The name of the resource group currently associated the service class. It can also be N/A if there is no resource group association. type: str sample: N/A server: description: - YES if the address space is a server. - No if the address space is not a server. + - YES if the address space is a server. + - No if the address space is not a server. type: str sample: NO started_class_list: description: - The name of the service class currently associated with the address space. + - The name of the service class currently associated with the address space. type: str sample: SYSSTC started_time: description: - The time when the started task started. + - The time when the started task started. type: str sample: "2025-09-11 18:21:50.293644+00:00" system_management_control: description: - Number of outstanding step-must-complete requests. + - Number of outstanding step-must-complete requests. type: str sample: 000 task_identifier: @@ -500,10 +506,11 @@ type: str sample: SYSTEM verbose_output: - description: If C(verbose=true), the system log related to the started task executed state will be shown. - returned: changed - type: list - sample: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... 
+ description: + - If C(verbose=true), the system log related to the started task executed state will be shown. + returned: changed + type: list + sample: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... """ from ansible.module_utils.basic import AnsibleModule @@ -524,7 +531,7 @@ zoau_exceptions = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, started_task_name, execute_display_before=False, execute_display_after=False, timeout_s=0, **kwargs): +def execute_command(operator_cmd, started_task_name, execute_display_before=False, timeout_s=0, **kwargs): """Execute operator command. Parameters @@ -1356,7 +1363,7 @@ def run_module(): changed = False stdout = "" stderr = "" - rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, execute_display_after, timeout_s=wait_time_s, **kwargs) + rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, timeout_s=wait_time_s, **kwargs) isFailed = False system_logs = "" msg = "" From 61fa37bf44452dac9df483576e02e2eb462496e0 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 22 Sep 2025 22:19:10 +0530 Subject: [PATCH 60/73] Update zos_started_task.rst --- docs/source/modules/zos_started_task.rst | 485 ++++++++++++++++++++++- 1 file changed, 463 insertions(+), 22 deletions(-) diff --git a/docs/source/modules/zos_started_task.rst b/docs/source/modules/zos_started_task.rst index e7d97f8155..b711fc39f0 100644 --- a/docs/source/modules/zos_started_task.rst +++ b/docs/source/modules/zos_started_task.rst @@ -26,43 +26,80 @@ Parameters ---------- +arm + *arm* indicates to execute normal task termination routines without causing address space destruction. + + Only applicable when state is forced, otherwise is ignored. 
+ + | **required**: False + | **type**: bool + + +armrestart + Indicates that the batch job or started task should be automatically restarted after the cancel completes, if it is registered as an element of the automatic restart manager. If the job or task is not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. + + Only applicable when state is cancelled or forced, otherwise is ignored. + + | **required**: False + | **type**: bool + + asid - *asid* is a unique address space identifier which gets assigned to each running started task. + When state is cancelled or stopped or forced, asid is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. + + Only applicable when state is stopped or cancelled or forced, otherwise is ignored. | **required**: False | **type**: str device_type - *device_type* is the type of the output device (if any) associated with the task. + Option device_type is the type of the output device (if any) associated with the task. + + Only applicable when state is started otherwise ignored. | **required**: False | **type**: str device_number - *device_number* is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. + Option device_number is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. + + Only applicable when state=started otherwise ignored. | **required**: False | **type**: str +dump + A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. + + Only applicable when state is cancelled otherwise ignored. + + | **required**: False + | **type**: bool + + identifier_name - *identifier_name* is the name that identifies the task to be started. This name can be up to 8 characters long. 
The first character must be alphabetical. + Option identifier_name is the name that identifies the task. This name can be up to 8 characters long. The first character must be alphabetical. | **required**: False | **type**: str job_account - *job_account* specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. + Option job_account specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. + + Only applicable when state is started otherwise ignored. | **required**: False | **type**: str job_name - *job_name* is a name which should be assigned to a started task while starting it. If job_name is not specified, then member_name is used as job_name. + When state=started job_name is a name which should be assigned to a started task while starting it. If job_name is not specified, then member_name is used as job_name. Otherwise, job_name is the started task job name used to find and apply the state selected. + + When state is displayed or modified or cancelled or stopped or forced, job_name is the started task name. | **required**: False | **type**: str @@ -71,46 +108,79 @@ job_name keyword_parameters Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. + Only applicable when state is started otherwise ignored. + | **required**: False - | **type**: str + | **type**: dict member_name - *member_name* is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. 
The member can be either a job or a cataloged procedure. + Option member_name is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. + + Only applicable when state is started otherwise ignored. | **required**: False | **type**: str -operation - The started task operation which needs to be performed. +parameters + Program parameters passed to the started program. - If *operation=start* and the data set does not exist on the managed node, no action taken, module completes successfully with *changed=False*. + Only applicable when state is started or modified otherwise ignored. + | **required**: False + | **type**: list + | **elements**: str - | **required**: True - | **type**: str - | **choices**: start, stop, modify, display, force, cancel +retry + *retry* is applicable for only FORCE TCB. -parameters - Program parameters passed to the started program, which might be a list in parentheses or a string in single quotation marks + Only applicable when state= is forced otherwise ignored. | **required**: False | **type**: str + | **choices**: YES, NO reus_asid When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + Only applicable when state is started otherwise ignored. + | **required**: False | **type**: str | **choices**: YES, NO -subsystem_name +state + *state* should be the desired state of the started task after the module is executed. + + If state is started and the respective member is not present on the managed node, then error will be thrown with rc=1, changed=false and stderr which contains error details. 
+ + If state is cancelled , modified, displayed, stopped or forced and the started task is not running on the managed node, then error will be thrown with rc=1, changed=false and stderr contains error details. + + If state is displayed and the started task is running, then the module will return the started task details along with changed=true. + + | **required**: True + | **type**: str + | **choices**: started, displayed, modified, cancelled, stopped, forced + + +subsystem The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + Only applicable when state is started otherwise ignored. + + | **required**: False + | **type**: str + + +tcb_address + *tcb_address* is a 6-digit hexadecimal TCB address of the task to terminate. + + Only applicable when state is forced otherwise ignored. + | **required**: False | **type**: str @@ -118,27 +188,50 @@ subsystem_name volume_serial If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. + Only applicable when state is started otherwise ignored. + + | **required**: False + | **type**: str + + +userid + The user ID of the time-sharing user you want to cancel or force. + + Only applicable when state= is cancelled or forced , otherwise ignored. + | **required**: False | **type**: str verbose - Return System logs that describe the task's execution. + When verbose=true return system logs that describe the task execution. Using this option will can return a big response depending on system load, also it could surface other programs activity. | **required**: False | **type**: bool | **default**: False -wait_time_s - Option *wait_time_s* is the total time that module `zos_started_tak <./zos_started_task.html>`_ will wait for a submitted task. The time begins when the module is executed on the managed node. 
+wait_time + Option wait_time is the total time that module zos_started_task will wait for a submitted task in centiseconds. The time begins when the module is executed on the managed node. Default value of 0 means to wait the default amount of time supported by the opercmd utility. | **required**: False | **type**: int - | **default**: 5 + | **default**: 0 + +Attributes +---------- +action + | **support**: none + | **description**: Indicates this has a corresponding action plugin so some parts of the options can be executed on the controller. +async + | **support**: full + | **description**: Supports being used with the ``async`` keyword. +check_mode + | **support**: full + | **description**: Can run in check_mode and return changed status prediction without modifying target. If not supported, the action will be skipped. @@ -150,15 +243,363 @@ Examples - name: Start a started task using member name. zos_started_task: + state: "started" + member: "PROCAPP" + - name: Start a started task using member name and identifier. + zos_started_task: + state: "started" member: "PROCAPP" - operation: "start" + identifier: "SAMPLE" + - name: Start a started task using member name and job. + zos_started_task: + state: "started" + member: "PROCAPP" + job_name: "SAMPLE" + - name: Start a started task using member name, job and enable verbose. + zos_started_task: + state: "started" + member: "PROCAPP" + job_name: "SAMPLE" + verbose: True + - name: Start a started task using member name, subsystem and enable reuse asid. + zos_started_task: + state: "started" + member: "PROCAPP" + subsystem: "MSTR" + reus_asid: "YES" + - name: Display a started task using started task name. + zos_started_task: + state: "displayed" + task_name: "PROCAPP" + - name: Display started tasks using matching regex. + zos_started_task: + state: "displayed" + task_name: "s*" + - name: Display all started tasks. 
+ zos_started_task: + state: "displayed" + task_name: "all" + - name: Cancel a started tasks using task name. + zos_started_task: + state: "cancelled" + task_name: "SAMPLE" + - name: Cancel a started tasks using task name and asid. + zos_started_task: + state: "cancelled" + task_name: "SAMPLE" + asid: 0014 + - name: Cancel a started tasks using task name and asid. + zos_started_task: + state: "modified" + task_name: "SAMPLE" + parameters: ["XX=12"] + - name: Stop a started task using task name. + zos_started_task: + state: "stopped" + task_name: "SAMPLE" + - name: Stop a started task using task name, identifier and asid. + zos_started_task: + state: "stopped" + task_name: "SAMPLE" + identifier: "SAMPLE" + asid: 00A5 + - name: Force a started task using task name. + zos_started_task: + state: "forced" + task_name: "SAMPLE" + + + + + + + + + + +Return Values +------------- + + +changed + True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + +cmd + Command executed via opercmd. + + | **returned**: changed + | **type**: str + | **sample**: S SAMPLE + +msg + Failure or skip message returned by the module. + + | **returned**: failure or skipped + | **type**: str + | **sample**: Command parameters are invalid. + +rc + The return code is 0 when command executed successfully. + + The return code is 1 when opercmd throws any error. + + The return code is 5 when any parameter validation failed. + + | **returned**: changed + | **type**: int + +state + The final state of the started task, after execution.. + + | **returned**: changed + | **type**: str + | **sample**: S SAMPLE + +stderr + The STDERR from the command, may be empty. + + | **returned**: changed + | **type**: str + | **sample**: An error has ocurred. + +stderr_lines + List of strings containing individual lines from STDERR. + + | **returned**: changed + | **type**: list + | **sample**: + + .. 
code-block:: json + + [ + "An error has ocurred" + ] + +stdout + The STDOUT from the command, may be empty. + + | **returned**: changed + | **type**: str + | **sample**: ISF031I CONSOLE OMVS0000 ACTIVATED. + +stdout_lines + List of strings containing individual lines from STDOUT. + + | **returned**: changed + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "Allocation to SYSEXEC completed." + ] + +tasks + The output information for a list of started tasks matching specified criteria. + + If no started task is found then this will return empty. + + | **returned**: success + | **type**: list + | **elements**: dict + + address_space_second_table_entry + The control block used to manage memory for a started task + + | **type**: str + | **sample**: 03E78500 + + affinity + The identifier of the processor, for up to any four processors, if the job requires the services of specific processors. + + affinity=NONE means the job can run on any processor. + + | **type**: str + | **sample**: NONE + + asid + Address space identifier (ASID), in hexadecimal. + + | **type**: str + | **sample**: 44 + + cpu_time + The processor time used by the address space, including the initiator. This time does not include SRB time. + + cpu_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working + + | **type**: str + | **sample**: 000.008S + + dataspaces + The started task dataspaces details. + + | **returned**: success + | **type**: list + | **elements**: dict + + data_space_address_entry + Central address of the data space ASTE. + + | **type**: str + | **sample**: 058F2180 + + dataspace_name + Data space name associated with the address space. 
+ + | **type**: str + | **sample**: CIRRGMAP + + + domain_number + domain_number=N/A if the system is operating in goal mode. + + | **type**: str + | **sample**: N/A + + elapsed_time + For address spaces other than system address spaces, the elapsed time since job select time. + + For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. + + For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. elapsed_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working + + | **type**: str + | **sample**: 812.983S + + priority + The priority of a started task is determined by the Workload Manager (WLM), based on the service class and importance assigned to it. + + | **type**: str + | **sample**: 1 + + proc_step_name + For APPC-initiated transactions, the user ID requesting the transaction. + + The name of a step within a cataloged procedure that was called by the step specified in field sss. + + Blank, if there is no cataloged procedure. + + The identifier of the requesting transaction program. + + | **type**: str + | **sample**: VLF + + program_event_recording + YES if A PER trap is active in the address space. + + NO if No PER trap is active in the address space. + + | **type**: str + + program_name + program_name=N/A if the system is operating in goal mode. + + | **type**: str + | **sample**: N/A + + queue_scan_count + YES if the address space has been quiesced. + + NO if the address space is not quiesced. + + | **type**: str + + resource_group + The name of the resource group currently associated the service class. 
It can also be N/A if there is no resource group association. + + | **type**: str + | **sample**: N/A + + server + YES if the address space is a server. + + No if the address space is not a server. + + | **type**: str + + started_class_list + The name of the service class currently associated with the address space. + + | **type**: str + | **sample**: SYSSTC + + started_time + The time when the started task started. + + | **type**: str + | **sample**: 2025-09-11 18:21:50.293644+00:00 + + system_management_control + Number of outstanding step-must-complete requests. + + | **type**: str + + task_identifier + The name of a system address space. + + The name of a step, for a job or attached APPC transaction program attached by an initiator. + + The identifier of a task created by the START command. + + The name of a step that called a cataloged procedure. + + STARTING if initiation of a started job, system task, or attached APPC transaction program is incomplete. + + MASTER* for the master address space. + + The name of an initiator address space. + + | **type**: str + | **sample**: SPROC + + task_name + The name of the started task. + + | **type**: str + | **sample**: SAMPLE + + task_status + IN for swapped in. + + OUT for swapped out, ready to run. + + OWT for swapped out, waiting, not ready to run. + + OU* for in process of being swapped out. + + IN* for in process of being swapped in. + + NSW for non-swappable. + + | **type**: str + | **sample**: NSW + task_type + S for started task. + | **type**: str + | **sample**: S + workload_manager + The name of the workload currently associated with the address space. + | **type**: str + | **sample**: SYSTEM +verbose_output + If ``verbose=true``, the system log related to the started task executed state will be shown. + | **returned**: changed + | **type**: list + | **sample**: + .. code-block:: json + "NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210...." 
From cc0afef8a2d1c499d81d9f6babd8af0ee40b385c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Tue, 23 Sep 2025 11:25:10 -0500 Subject: [PATCH 61/73] [Enhancement][1538]support_administrator_share_words (#2320) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add new structure on sms restore test cases and add volumes * Remove prints * Add fragment * Fix unit testing for arguments * Add more validations * Fix documentation * Fix documentation * Fix documentation * Update volumes naming * Add versions * First iteration * Change assignation * Add testing * Add fragme and delete lines and extras unnecesary * Fix documents * Update tests/functional/modules/test_zos_backup_restore.py Co-authored-by: Fernando Flores * Update plugins/modules/zos_backup_restore.py Co-authored-by: Fernando Flores --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores --- ...2320-support_administrator_share_words.yml | 6 + plugins/modules/zos_backup_restore.py | 105 +++++++++++++++++- .../modules/test_zos_backup_restore.py | 39 ++++++- 3 files changed, 147 insertions(+), 3 deletions(-) create mode 100644 changelogs/fragments/2320-support_administrator_share_words.yml diff --git a/changelogs/fragments/2320-support_administrator_share_words.yml b/changelogs/fragments/2320-support_administrator_share_words.yml new file mode 100644 index 0000000000..fee3194658 --- /dev/null +++ b/changelogs/fragments/2320-support_administrator_share_words.yml @@ -0,0 +1,6 @@ +minor_changes: + - zos_backup_restore - Adds ``access`` to specify how the module will access data sets and z/OS UNIX files when performing a backup or restore operation. + Adds ``share`` to specify the module allow data set read access to other programs while backing up or restoring. 
+ Adds ``auth`` allows you to act as an administrator, where it will disable checking the current users privileges for z/OS UNIX files, data sets and + catalogs. + (https://github.com/ansible-collections/ibm_zos_core/pull/2320) \ No newline at end of file diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 0cb02acf23..e1ca3198d6 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -33,6 +33,54 @@ backups can be restored to systems where Ansible and ZOAU are not available. Conversely, dumps created with ADRDSSU and AMATERSE can be restored using this module. options: + access: + description: + - Specifies how the module will access data sets and z/OS UNIX files when + performing a backup or restore operation. + type: dict + required: false + suboptions: + share: + description: + - Specifies that the module allow data set read access to other programs + while backing up or restoring. + - I(share) and C(full_volume) are mutually exclusive; you cannot use both. + - Option I(share)is conditionally supported for I(operation=backup) or + I(operation=restore). + - When I(operation=backup), and source backup is a VSAM data set, the + option is only supported for VSAM data sets which are not defined with + VSAM SHAREOPTIONS (1,3) or (1,4). + - When I(operation=restore), and restore target is a VSAM data set or + PDSE data set, this option is not supported. Both data set types will + be accessed exlusivly preventing reading or writing to the VSAM, PDSE, + or PDSE members. + - The SHAREOPTIONS for VSAM data sets. + - (1) the data set can be shared by multiple programs for read-only + processing, or a single program for read and write processing. + - (2) the data set can be accessed by multiple programs for read-only + processing, and can also be accessed by a program for write processing. 
+ - (3) the data set can be shared by multiple programs where each + program is responsible for maintaining both read and write data integrity. + - (4) the data set can be shared by multiple programs where each program is + responsible for maintaining both read and write data integrity differing + from (3) in that I/O buffers are updated for each request. + type: bool + required: false + default: false + auth: + description: + - I(auth=true) allows you to act as an administrator, where it will disable + checking the current users privileges for z/OS UNIX files, data sets and + catalogs. + - This is option is supported both, I(operation=backup) and I(operation=restore). + - If you are not authorized to use this option, the module ends with an + error message. + - Some authorization checking for data sets is unavoidable, when when I(auth) + is specified because some checks are initiated by services and programs + invoked by this module which can not be bypassed. + type: bool + required: false + default: false operation: description: - Used to specify the operation to perform. @@ -466,6 +514,15 @@ include: user.vsam.** backup_name: /tmp/temp_backup.dzp index: true + +- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp + whether they exist or not and do so as authorized disabling any security checks. 
+ zos_backup_restore: + operation: restore + backup_name: /tmp/temp_backup.dzp + access: + auth: true + share: true """ RETURN = r""" @@ -521,6 +578,14 @@ def main(): """ result = dict(changed=False, message="", backup_name="") module_args = dict( + access=dict( + type='dict', + required=False, + options=dict( + share=dict(type='bool', default=False), + auth=dict(type='bool', default=False) + ) + ), operation=dict(type="str", required=True, choices=["backup", "restore"]), data_sets=dict( required=False, @@ -576,6 +641,7 @@ def main(): hlq = params.get("hlq") tmp_hlq = params.get("tmp_hlq") sphere = params.get("index") + access = params.get('access') if sms and bool(sms.get("storage_class")) and sms.get("disable_automatic_storage_class"): module.fail_json(msg="storage_class and disable_automatic_storage_class are mutually exclusive, only one can be use by operation.") @@ -583,6 +649,9 @@ def main(): if sms and bool(sms.get("management_class")) and sms.get("disable_automatic_management_class"): module.fail_json(msg="management_class and disable_automatic_management_class are mutually exclusive, only one can be use by operation.") + if access and access.get("share") and full_volume: + module.fail_json(msg="access.share cannot be used with full_volume. These options are mutually exclusive.") + if operation == "backup": backup( backup_name=backup_name, @@ -600,6 +669,7 @@ def main(): sms=sms, tmp_hlq=tmp_hlq, sphere=sphere, + access=access, ) else: restore( @@ -617,6 +687,7 @@ def main(): sms=sms, tmp_hlq=tmp_hlq, sphere=sphere, + access=access, ) result["backup_name"] = backup_name result["changed"] = True @@ -660,6 +731,14 @@ def parse_and_validate_args(params): The updated params after additional parsing and validation. 
""" arg_defs = dict( + access=dict( + type='dict', + required=False, + options=dict( + share=dict(type='bool', default=False), + auth=dict(type='bool', default=False) + ) + ), operation=dict(type="str", required=True, choices=["backup", "restore"]), data_sets=dict( required=False, @@ -729,6 +808,7 @@ def backup( sms, tmp_hlq, sphere, + access, ): """Backup data sets or a volume to a new data set or unix file. @@ -764,6 +844,8 @@ def backup( Specifies the tmp hlq to temporary datasets. sphere : dict Specifies ADRDSSU keywords that is passed directly to the dunzip utility. + access : dict + Specifies keywords for share and administration permission. """ args = locals() zoau_args = to_dzip_args(**args) @@ -785,6 +867,7 @@ def restore( sms, tmp_hlq, sphere, + access, ): """Restore data sets or a volume from the backup. @@ -822,6 +905,8 @@ def restore( Specifies the tmp hlq to temporary datasets. sphere : dict Specifies ADRDSSU keywords that is passed directly to the dunzip utility. + access : dict + Specifies keywords for share and administration permission. Raises ------ @@ -849,7 +934,7 @@ def restore( ) -def set_adrdssu_keywords(sphere, sms=None): +def set_adrdssu_keywords(sphere, sms=None, access=None): """Set the values for special keywords, dunzip use key value for most special words. Parameters @@ -858,7 +943,8 @@ def set_adrdssu_keywords(sphere, sms=None): Dictionary of key value of management an storage class. sphere : bool Value if sphere will be use on dictionary for VSAM. - + access : dict + Dictionary of key values for management classes. 
Returns ------- keywords : dict @@ -880,6 +966,13 @@ def set_adrdssu_keywords(sphere, sms=None): bypassacs = set_bypassacs_str(sms.get("disable_automatic_class")) keywords.update(bypass_acs=bypassacs) + if access: + if access.get("auth"): + keywords.update(ADMINISTRATOR="ADMINistrator") + + if access.get("share"): + keywords.update(SHARE="SHAre") + return keywords @@ -1267,6 +1360,7 @@ def to_dunzip_args(**kwargs): zoau_args["keep_original_hlq"] = False sms = kwargs.get("sms") + access = kwargs.get("access") keywords = set_adrdssu_keywords(sphere=kwargs.get("sphere")) if sms: @@ -1286,6 +1380,13 @@ def to_dunzip_args(**kwargs): bypassacs = set_bypassacs_str(ds=sms.get("disable_automatic_class")) zoau_args["bypass_acs"] = bypassacs + if access: + if access.get("auth"): + zoau_args['admin'] = access.get("auth") + + if access.get("share"): + zoau_args['share'] = access.get("share") + if keywords: zoau_args["keywords"] = keywords diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 660346813f..7c43c2688e 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -1022,7 +1022,6 @@ def test_backup_and_restore_all_of_sms_group(ansible_zos_module, volumes_sms_sys hosts, data_set_name, DATA_SET_CONTENTS, volume ) sms = {"storage_class":smsgrp} - for attempt in range(2): results = hosts.all.zos_backup_restore( data_sets=dict(include=data_set_name), @@ -1378,3 +1377,41 @@ def test_backup_of_vsam_index(ansible_zos_module, volumes_with_vvds): delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, alternate_index) delete_data_set_or_file(hosts, backup_name) + + +def test_backup_and_restore_of_auth_shr_group(ansible_zos_module, volumes_sms_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + volumes = Volume_Handler(volumes_sms_systems) + 
volume, smsgrp = volumes.get_available_vol_with_sms() + delete_data_set_or_file(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume + ) + results = hosts.all.zos_backup_restore( + data_sets=dict(include=data_set_name), + operation="backup", + backup_name=data_set_backup_location, + overwrite=True, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + access = { + "share":True, + "auth":True + } + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + overwrite=True, + access=access, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_name) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) \ No newline at end of file From b47b3de251caa200f4c66f6d75f6f20c77aea8b7 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Wed, 24 Sep 2025 20:22:44 +0530 Subject: [PATCH 62/73] Addressing PR comments --- plugins/module_utils/better_arg_parser.py | 2 +- plugins/modules/zos_started_task.py | 125 ++++++++++------------ 2 files changed, 58 insertions(+), 69 deletions(-) diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index 62b9f247f2..f4a52eee09 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -360,7 +360,7 @@ def _bool_type(self, contents, resolve_dependencies): def _member_name_type(self, contents, resolve_dependencies): """Resolver for PDS/E member name type arguments. This is part of - zos_started_task member name validfation. + zos_started_task member name validation. 
Parameters ---------- diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 4bff7dda68..3704122aa1 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -30,49 +30,49 @@ arm: description: - I(arm) indicates to execute normal task termination routines without causing address space destruction. - - Only applicable when state is forced, otherwise is ignored. + - Only applicable when I(state) is C(forced), otherwise ignored. required: false type: bool armrestart: description: - - Indicates that the batch job or started task should be automatically restarted after the cancel + - Indicates that the batch job or started task should be automatically restarted after CANCEL completes, if it is registered as an element of the automatic restart manager. If the job or task is not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. - - Only applicable when state is cancelled or forced, otherwise is ignored. + - Only applicable when I(state) is C(cancelled) or C(forced), otherwise ignored. required: false type: bool asid: description: - - When state is cancelled or stopped or forced, asid is the hexadecimal address space + - When I(state) is C(cancelled), C(stopped) or C(forced), I(asid) is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. - - Only applicable when state is stopped or cancelled or forced, otherwise is ignored. + - Only applicable when I(state) is C(stopped), C(cancelled), or C(forced), otherwise ignored. required: false type: str device_type: description: - - Option device_type is the type of the output device (if any) associated with the task. - - Only applicable when state is started otherwise ignored. + - Option I(device_type) is the type of the output device (if any) associated with the task. + - Only applicable when I(state) is C(started), otherwise ignored. 
required: false type: str device_number: description: - - Option device_number is the number of the device to be started. A device number is 3 or 4 + - Option I(device_number) is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. - - Only applicable when state=started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str dump: description: - A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. - - Only applicable when state is cancelled otherwise ignored. + - Only applicable when I(state) is C(cancelled), otherwise ignored. required: false type: bool identifier_name: description: - - Option identifier_name is the name that identifies the task. This name can be up to 8 + - Option I(identifier_name) is the name that identifies the task. This name can be up to 8 characters long. The first character must be alphabetical. required: false type: str @@ -80,19 +80,19 @@ - identifier job_account: description: - - Option job_account specifies accounting data in the JCL JOB statement for the started + - Option I(job_account) specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. - - Only applicable when state is started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str job_name: description: - - When state=started job_name is a name which should be assigned to a started task - while starting it. If job_name is not specified, then member_name is used as job_name. 
- Otherwise, job_name is the started task job name used to find and apply the state + - When I(state) is started, I(job_name) is a name which should be assigned to a started task + while starting it. If I(job_name) is not specified, then I(member_name) is used as I(job_name). + Otherwise, I(job_name) is the started task job name used to find and apply the state selected. - - When state is displayed or modified or cancelled or stopped or forced, job_name is the + - When I(state) is C(displayed), C(modified), C(cancelled), C(stopped), or C(forced), I(job_name) is the started task name. required: false type: str @@ -105,15 +105,15 @@ - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. - - Only applicable when state is started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: dict member_name: description: - - Option member_name is a 1 - 8 character name of a member of a partitioned data set that + - Option I(member_name) is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. - - Only applicable when state is started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str aliases: @@ -121,14 +121,14 @@ parameters: description: - Program parameters passed to the started program. - - Only applicable when state is started or modified otherwise ignored. + - Only applicable when I(state) is C(started) or C(modified), otherwise ignored. required: false type: list elements: str retry: description: - I(retry) is applicable for only FORCE TCB. - - Only applicable when state= is forced otherwise ignored. 
+ - Only applicable when I(state) is C(forced), otherwise ignored. required: false type: str choices: @@ -139,7 +139,7 @@ - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. - - Only applicable when state is started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str choices: @@ -148,12 +148,12 @@ state: description: - I(state) should be the desired state of the started task after the module is executed. - - If state is started and the respective member is not present on the managed node, then error will be thrown with rc=1, - changed=false and stderr which contains error details. - - If state is cancelled , modified, displayed, stopped or forced and the started task is not running on the managed node, - then error will be thrown with rc=1, changed=false and stderr contains error details. - - If state is displayed and the started task is running, then the module will return the started task details along with - changed=true. + - If I(state) is C(started) and the respective member is not present on the managed node, then error will be thrown with C(rc=1), + C(changed=false) and I(stderr) which contains error details. + - If I(state) is C(cancelled), C(modified), C(displayed), C(stopped) or C(forced) and the started task is not running on the managed node, + then error will be thrown with C(rc=1), C(changed=false) and I(stderr) contains error details. + - If I(state) is C(displayed) and the started task is running, then the module will return the started task details along with + C(changed=true). required: True type: str choices: @@ -168,39 +168,39 @@ - The name of the subsystem that selects the task for processing. 
The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - - Only applicable when state is started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str tcb_address: description: - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. - - Only applicable when state is forced otherwise ignored. + - Only applicable when I(state) is C(forced), otherwise ignored. required: false type: str volume_serial: description: - - If devicetype is a tape or direct-access device, the volume serial number of the volume is + - If I(device_type) is a tape or direct-access device, the volume serial number of the volume is mounted on the device. - - Only applicable when state is started otherwise ignored. + - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str userid: description: - The user ID of the time-sharing user you want to cancel or force. - - Only applicable when state= is cancelled or forced , otherwise ignored. + - Only applicable when I(state) is C(cancelled) or C(forced), otherwise ignored. required: false type: str verbose: description: - - When verbose=true return system logs that describe the task execution. - Using this option will can return a big response depending on system load, also it could + - When C(verbose=true), return system logs that describe the task execution. + Using this option, can return a big response depending on system load, also it could surface other programs activity. required: false type: bool default: false wait_time: description: - - Option wait_time is the total time that module zos_started_task will wait for a submitted task in centiseconds. + - Option I(wait_time) is the total time that module zos_started_task will wait for a submitted task in centiseconds. The time begins when the module is executed on the managed node. 
Default value of 0 means to wait the default amount of time supported by the opercmd utility. required: false @@ -324,13 +324,13 @@ - The STDERR from the command, may be empty. returned: changed type: str - sample: An error has ocurred. + sample: An error has occurred. stderr_lines: description: - List of strings containing individual lines from STDERR. returned: changed type: list - sample: ["An error has ocurred"] + sample: ["An error has occurred"] stdout: description: - The STDOUT from the command, may be empty. @@ -380,7 +380,7 @@ sample: 000.008S dataspaces: description: - - The started task dataspaces details. + - The started task data spaces details. returned: success type: list elements: dict @@ -405,7 +405,7 @@ - For address spaces other than system address spaces, the elapsed time since job select time. - For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. - For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. - elapsed_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. + elapsed_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. 
sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours @@ -616,12 +616,6 @@ def validate_and_prepare_start_command(module): keyword_parameters_string = "" device = device_type if device_type else device_number # Validations - if device_number and device_type: - module.fail_json( - rc=5, - msg="device_number and device_type are mutually exclusive.", - changed=False - ) if job_account and len(job_account) > 55: module.fail_json( rc=5, @@ -897,12 +891,6 @@ def prepare_force_command(module): msg="The TCB address of the task should be exactly 6-digit hexadecimal.", changed=False ) - if retry and not tcb_address: - module.fail_json( - rc=5, - msg="The RETRY parameter is valid with the TCB parameter only.", - changed=False - ) if userid and armrestart: module.fail_json( rc=5, @@ -1185,6 +1173,7 @@ def run_module(): mutually_exclusive=[ ['device_number', 'device_type'] ], + required_by={'retry': ['tcb_address']}, supports_check_mode=True ) @@ -1294,8 +1283,7 @@ def run_module(): verbose = module.params.get('verbose') kwargs = {} """ - Below error messages are used to detrmine if response has any error.When - response could have any of below error message has explained below. + Below error messages or error codes are used to determine if response has any error. JCL ERROR - IEE122I: Response contains this keyword when JCL contains syntax error. INVALID PARAMETER - IEE535I: When invalid parameter passed in command line. @@ -1306,12 +1294,12 @@ def run_module(): NON-CANCELABLE - IEE838I: When cancel command can't stop job and force command is needed. 
CANCELABLE - IEE838I: When force command used without using cancel command """ - start_errmsg = ['IEE122I', 'IEE535I', 'IEE307I', 'ERROR'] - stop_errmsg = ['IEE341I', 'IEE535I'] - display_errmsg = ['IEE341I', 'IEE535I', 'NOT FOUND'] - modify_errmsg = ['REJECTED', 'IEE341I', 'IEE535I', 'IEE311I'] - cancel_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'IEE842I', 'NON-CANCELABLE'] - force_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'CANCELABLE', 'IEE842I'] + start_errmsg = ['IEE122I', 'IEE535I', 'IEE307I', 'ERROR', 'IEE708I'] + stop_errmsg = ['IEE341I', 'IEE535I', 'IEE708I'] + display_errmsg = ['IEE341I', 'IEE535I', 'NOT FOUND', 'IEE708I'] + modify_errmsg = ['REJECTED', 'IEE341I', 'IEE535I', 'IEE311I', 'IEE708I'] + cancel_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'IEE842I', 'NON-CANCELABLE', 'IEE708I'] + force_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'CANCELABLE', 'IEE842I', 'IEE708I'] error_details = { 'IEE122I': 'Specified member is missing or PROC/JOB contains incorrect JCL statements.', 'IEE535I': 'A parameter on a command is not valid.', @@ -1323,7 +1311,8 @@ def run_module(): 'IEE842I': 'More than one active job with the specified name exist.', 'NON-CANCELABLE': 'The task cannot be canceled. Use the FORCE ARM command.', 'CANCELABLE': 'The task can be canceled. Use the CANCEL command.', - 'IEE311I': 'Required parameter is missing.' + 'IEE311I': 'Required parameter is missing.', + 'IEE708I': 'The value of a keyword specified on a command is incorrect.' 
} err_msg = [] kwargs = {} @@ -1364,24 +1353,24 @@ def run_module(): stdout = "" stderr = "" rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, timeout_s=wait_time_s, **kwargs) - isFailed = False + is_failed = False system_logs = "" msg = "" found_msg = next((msg for msg in err_msg if msg in out), None) if err != "" or found_msg: - isFailed = True + is_failed = True # Fetch system logs to validate any error occured in execution - if not isFailed or verbose: + if not is_failed or verbose: system_logs = fetch_logs(cmd.upper(), wait_time_s) # If sysout is not having error, then check system log as well to make sure no error occured - if not isFailed: + if not is_failed: found_msg = next((msg for msg in err_msg if msg in system_logs), None) if found_msg: - isFailed = True + is_failed = True if not verbose: system_logs = "" current_state = "" - if isFailed: + if is_failed: if rc == 0: rc = 1 changed = False From d87d31454849d78ab640fb7d47e02d6bb3d175cf Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 25 Sep 2025 20:42:22 +0530 Subject: [PATCH 63/73] resolving PR comments --- plugins/modules/zos_started_task.py | 133 +++++++++--------- .../modules/test_zos_started_task_func.py | 14 +- 2 files changed, 76 insertions(+), 71 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 3704122aa1..793ddb1c78 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -35,10 +35,9 @@ type: bool armrestart: description: - - Indicates that the batch job or started task should be automatically restarted after CANCEL - completes, if it is registered as an element of the automatic restart manager. If the job or - task is not registered or if you do not specify this parameter, MVS will not automatically - restart the job or task. 
+ - Indicates that the batch job or started task should be automatically restarted after CANCEL or FORCE + completes, if it is registered as an element of the automatic restart manager. If the job or task is + not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. - Only applicable when I(state) is C(cancelled) or C(forced), otherwise ignored. required: false type: bool @@ -51,21 +50,20 @@ type: str device_type: description: - - Option I(device_type) is the type of the output device (if any) associated with the task. + - Type of the output device (if any) associated with the task. - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str device_number: description: - - Option I(device_number) is the number of the device to be started. A device number is 3 or 4 - hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit - number. + - Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must + precede a 4-digit number but is not before a 3-digit number. - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str dump: description: - - A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) + - Whether to perform a dump. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. - Only applicable when I(state) is C(cancelled), otherwise ignored. required: false @@ -80,9 +78,9 @@ - identifier job_account: description: - - Option I(job_account) specifies accounting data in the JCL JOB statement for the started - task. If the source JCL was a job and has already accounting data, the value that is - specified on this parameter overrides the accounting data in the source JCL. + - Specifies accounting data in the JCL JOB statement for the started task. 
If the source JCL + was a job and has already accounting data, the value that is specified on this parameter + overrides the accounting data in the source JCL. - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str @@ -102,17 +100,16 @@ - task_name keyword_parameters: description: - - Any appropriate keyword parameter that you specify to override the corresponding - parameter in the cataloged procedure. The maximum length of each keyword=option is 66 - characters. No individual value within this field can be longer than 44 characters in length. + - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged + procedure. The maximum length of each keyword=option pair is 66 characters. No individual value within this + field can be longer than 44 characters in length. - Only applicable when I(state) is C(started), otherwise ignored. required: false type: dict member_name: description: - - Option I(member_name) is a 1 - 8 character name of a member of a partitioned data set that - contains the source JCL for the task to be started. The member can be either a job or a - cataloged procedure. + - Name of a member of a partitioned data set that contains the source JCL for the task to be started. The member + can be either a job or a cataloged procedure. - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str @@ -125,26 +122,21 @@ required: false type: list elements: str - retry: + retry_force: description: - - I(retry) is applicable for only FORCE TCB. + - Indicates whether retry will be attempted on ABTERM(abnormal termination). + - I(tcb_address) is mandatory to use I(retry_force). - Only applicable when I(state) is C(forced), otherwise ignored. 
required: false - type: str - choices: - - 'YES' - - 'NO' + type: bool reus_asid: description: - - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, - a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified - on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + - When I(reus_asid) is C(True) and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned + to the address space created by the START command. If I(reus_asid) is not specified or REUSASID(NO) is specified in + DIAGxx, an ordinary ASID is assigned. - Only applicable when I(state) is C(started), otherwise ignored. required: false - type: str - choices: - - 'YES' - - 'NO' + type: bool state: description: - I(state) should be the desired state of the started task after the module is executed. @@ -165,7 +157,7 @@ - forced subsystem: description: - - The name of the subsystem that selects the task for processing. The name must be 1 - 4 + - The name of the subsystem that selects the task for processing. The name must be 1-4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - Only applicable when I(state) is C(started), otherwise ignored. @@ -173,13 +165,13 @@ type: str tcb_address: description: - - I(tcb_address) is a 6-digit hexadecimal TCB address of the task to terminate. + - 6-digit hexadecimal TCB address of the task to terminate. - Only applicable when I(state) is C(forced), otherwise ignored. required: false type: str - volume_serial: + volume: description: - - If I(device_type) is a tape or direct-access device, the volume serial number of the volume is + - If I(device_type) is a tape or direct-access device, the serial number of the volume, mounted on the device. - Only applicable when I(state) is C(started), otherwise ignored. 
required: false @@ -192,9 +184,9 @@ type: str verbose: description: - - When C(verbose=true), return system logs that describe the task execution. - Using this option, can return a big response depending on system load, also it could - surface other programs activity. + - When C(verbose=true), the module will return system logs that describe the task's execution. + This option can return a big response depending on system load, also it could surface other + program's activity. required: false type: bool default: false @@ -405,7 +397,7 @@ - For address spaces other than system address spaces, the elapsed time since job select time. - For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. - For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. - elapsed_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. + elapsed_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours @@ -475,8 +467,8 @@ - The name of a step, for a job or attached APPC transaction program attached by an initiator. - The identifier of a task created by the START command. - The name of a step that called a cataloged procedure. - - STARTING if initiation of a started job, system task, or attached APPC transaction program is incomplete. - - MASTER* for the master address space. + - C(STARTING) if initiation of a started job, system task, or attached APPC transaction program is incomplete. + - C(*MASTER*) for the master address space. - The name of an initiator address space. 
type: str sample: SPROC @@ -487,17 +479,22 @@ sample: SAMPLE task_status: description: - - IN for swapped in. - - OUT for swapped out, ready to run. - - OWT for swapped out, waiting, not ready to run. - - OU* for in process of being swapped out. - - IN* for in process of being swapped in. - - NSW for non-swappable. + - C(IN) for swapped in. + - C(OUT) for swapped out, ready to run. + - C(OWT) for swapped out, waiting, not ready to run. + - C(OU*) for in process of being swapped out. + - C(IN*) for in process of being swapped in. + - C(NSW) for non-swappable. type: str sample: NSW task_type: description: - - S for started task. + - C(S) for started task. + - C(A) for an attached APPC transaction program. + - C(I) for initiator address space. + - C(J) for job + - C(M) for mount + - C(*) for system address space type: str sample: S workload_manager: @@ -609,9 +606,14 @@ def validate_and_prepare_start_command(module): parameters = module.params.get('parameters') or [] device_type = module.params.get('device_type') or "" device_number = module.params.get('device_number') or "" - volume_serial = module.params.get('volume_serial') or "" + volume_serial = module.params.get('volume') or "" subsystem_name = module.params.get('subsystem') - reus_asid = module.params.get('reus_asid') + reus_asid = '' + if module.params.get('reus_asid') is not None: + if module.params.get('reus_asid'): + reus_asid = 'YES' + else: + reus_asid = 'NO' keyword_parameters = module.params.get('keyword_parameters') keyword_parameters_string = "" device = device_type if device_type else device_number @@ -883,7 +885,12 @@ def prepare_force_command(module): armrestart = module.params.get('armrestart') userid = module.params.get('userid') tcb_address = module.params.get('tcb_address') - retry = module.params.get('retry') + retry = '' + if module.params.get('retry_force') is not None: + if module.params.get('retry_force'): + retry = 'YES' + else: + retry = 'NO' started_task_name = "" if tcb_address and 
len(tcb_address) != 6: module.fail_json( @@ -1133,15 +1140,13 @@ def run_module(): 'elements': 'str', 'required': False }, - 'retry': { - 'type': 'str', - 'required': False, - 'choices': ['YES', 'NO'] + 'retry_force': { + 'type': 'bool', + 'required': False }, 'reus_asid': { - 'type': 'str', - 'required': False, - 'choices': ['YES', 'NO'] + 'type': 'bool', + 'required': False }, 'subsystem': { 'type': 'str', @@ -1160,7 +1165,7 @@ def run_module(): 'required': False, 'default': False }, - 'volume_serial': { + 'volume': { 'type': 'str', 'required': False }, @@ -1173,7 +1178,7 @@ def run_module(): mutually_exclusive=[ ['device_number', 'device_type'] ], - required_by={'retry': ['tcb_address']}, + required_by={'retry_force': ['tcb_address']}, supports_check_mode=True ) @@ -1234,12 +1239,12 @@ def run_module(): 'elements': 'str', 'required': False }, - 'retry': { - 'arg_type': 'str', + 'retry_force': { + 'arg_type': 'bool', 'required': False }, 'reus_asid': { - 'arg_type': 'str', + 'arg_type': 'bool', 'required': False }, 'subsystem': { @@ -1258,7 +1263,7 @@ def run_module(): 'arg_type': 'bool', 'required': False }, - 'volume_serial': { + 'volume': { 'arg_type': 'str', 'required': False }, diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index 39c49a8d51..b0ec69e9ad 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -133,7 +133,7 @@ def test_start_task_with_invalid_volumeserial(ansible_zos_module): start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLE", - volume_serial = "12345A" + volume = "12345A" ) for result in start_results.contacted.values(): assert result.get("changed") is False @@ -158,7 +158,7 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): state = "started", member_name = "SAMPLE", parameters = ["KEY1", "KEY2", "KEY3"], - volume_serial = "123456" + volume 
= "123456" ) for result in start_results.contacted.values(): assert result.get("changed") is False @@ -395,7 +395,7 @@ def test_force_task_negative(ansible_zos_module): force_results = hosts.all.zos_started_task( state = "forced", job_name = "TESTER", - retry = "YES" + retry_force = True ) for result in force_results.contacted.values(): assert result.get("changed") is False @@ -405,7 +405,7 @@ def test_force_task_negative(ansible_zos_module): state = "forced", job_name = "TESTER", tcb_address = "0006789", - retry = "YES" + retry_force = True ) for result in force_results.contacted.values(): assert result.get("changed") is False @@ -416,7 +416,7 @@ def test_force_task_negative(ansible_zos_module): job_name = "TESTER", identifier = "SAMPLE", tcb_address = "000678", - retry = "YES" + retry_force = True ) for result in force_results.contacted.values(): assert result.get("changed") is False @@ -426,7 +426,7 @@ def test_force_task_negative(ansible_zos_module): state = "forced", userid = "OMVSTEST", tcb_address = "000678", - retry = "YES", + retry_force = True, verbose=True ) for result in force_results.contacted.values(): @@ -496,7 +496,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): state = "started", member = "SAMPLE", identifier = "TESTER", - reus_asid = "YES" + reus_asid = True ) for result in start_results.contacted.values(): assert result.get("changed") is True From 5859a09de5a10dd0e8f716e4450debe2ce98b851 Mon Sep 17 00:00:00 2001 From: Rohitash Goyal Date: Fri, 26 Sep 2025 21:10:20 +0530 Subject: [PATCH 64/73] adding ansible.cfg back to resolve action plugin issue (#2333) --- .gitignore | 1 - ansible.cfg | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 ansible.cfg diff --git a/.gitignore b/.gitignore index 044220f6df..7270e43399 100644 --- a/.gitignore +++ b/.gitignore @@ -258,7 +258,6 @@ shell_exploits.txt importer_result.json ac scripts/ -ansible.cfg 
################################################################################ # Debugging .ignore, if you want to know why a particular file is being ignored diff --git a/ansible.cfg b/ansible.cfg new file mode 100644 index 0000000000..19fa364ccb --- /dev/null +++ b/ansible.cfg @@ -0,0 +1,47 @@ +################################################################################ +# Copyright (c) IBM Corporation 2020, 2021 +################################################################################ + +################################################################################ +# For for on ansible.cfg options see: +# https://docs.ansible.com/ansible/latest/reference_appendices/config.html +# +# For a full sample see: +# https://github.com/ansible/ansible/blob/devel/examples/ansible.cfg +# +# Note: +# Examples of some options often used: +# remote_temp - The temporary directory Ansible uses to transfer files onto +# the controller. Default is `/.ansible/tmp` +# ansible_port - The connection port number Ansible uses to connect to the +# target; configure the target if is not using default SSH +# port 22 +# debug - Toggles debug output in Ansible. This is very verbose and can +# hinder multiprocessing. Debug output can also include secret +# information despite no_log settings being enabled, which means debug +# mode should not be used in production. Optionally, ad-hoc you can +# use ANSIBLE_DEBUG=1 +# verbosity - Sets the default verbosity, equivalent to the number of -v's +# passed in the command line. +# i.e. 
0|1|2|3|4 == None|-v|-vv|-vvv|-vvvv +################################################################################ + +[defaults] +forks = 25 +action_plugins=~/.ansible/collections/ansible_collections/ibm/ibm_zos_core/plugins/action +# remote_tmp = /u/ansible/tmp +# remote_port = 22 +# debug = True +# verbosity = 1 + +[ssh_connection] +pipelining = True + +[connection] +pipelining = True + +[colors] +verbose = green + +[persistent_connection] +command_timeout = 60 \ No newline at end of file From 08c46460442fca7841eda53eb532c682a3e73c3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= <68956970+AndreMarcel99@users.noreply.github.com> Date: Fri, 26 Sep 2025 12:07:27 -0500 Subject: [PATCH 65/73] [Documentation][2234]update_zos_replace_documentation (#2239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Test * Change documentation * Add fragment * Fix : * Fix line too long * Fix content * Update plugins/modules/zos_replace.py Co-authored-by: Alex Moreno * Update plugins/modules/zos_replace.py Co-authored-by: Alex Moreno * Apply suggestions from code review Co-authored-by: Alex Moreno * Modified zos_replace documentation with latest feedback * Fixed documentation lint issue * Fix O for I and new examples * Avoid check * Fixed a couple of sanity issues * Updated doc source * Updated the generated RST * Reset docs rst * Added replace rst * Update plugins/modules/zos_replace.py --------- Co-authored-by: André Marcel Gutiérrez Benítez Co-authored-by: Fernando Flores Co-authored-by: Alex Moreno Co-authored-by: Fernando Flores --- .../2239_Update_zos_replace_documentation.yml | 3 + docs/source/modules/zos_replace.rst | 66 +++++++++++++--- plugins/modules/zos_replace.py | 79 ++++++++++++++----- 3 files changed, 114 insertions(+), 34 deletions(-) create mode 100644 changelogs/fragments/2239_Update_zos_replace_documentation.yml diff --git 
a/changelogs/fragments/2239_Update_zos_replace_documentation.yml b/changelogs/fragments/2239_Update_zos_replace_documentation.yml new file mode 100644 index 0000000000..55d0b83bcf --- /dev/null +++ b/changelogs/fragments/2239_Update_zos_replace_documentation.yml @@ -0,0 +1,3 @@ +trivial: + - zos_replace_func.py - Update documentation adding default values and add verbosity for after, before and literal options. + (https://github.com/ansible-collections/ibm_zos_core/pull/2239). \ No newline at end of file diff --git a/docs/source/modules/zos_replace.rst b/docs/source/modules/zos_replace.rst index 3a0dfcce34..70b2adf2f7 100644 --- a/docs/source/modules/zos_replace.rst +++ b/docs/source/modules/zos_replace.rst @@ -27,9 +27,17 @@ Parameters after - If specified, only content after this match will be replaced/removed. + A regular expression that, if specified, determines which content will be replaced or removed **after** the match. - Can be used in combination with *before*. + Option *after* is the start position from where the module will seek to match the *regexp* pattern. When a pattern is matched, occurrences are substituted with the value set for *replace*. + + If option *after* is not set, the module will search from the beginning of the *target*. + + Option *after* is a regular expression as described in the `Python library `_. + + Option *after* can be used in combination with *before*. When combined with *before*, patterns are replaced or removed from *after* until the value set for *before*. + + Option *after* can be interpreted as a literal string instead of a regular expression by setting option *literal=after*. | **required**: False | **type**: str @@ -69,9 +77,17 @@ backup_name before - If specified, only content before this match will be replaced/removed. + A regular expression that if, specified, determines which content will be replaced or removed **before** the match. 
+ + Option *before* is the end position from where the module will seek to match the *regexp* pattern. When a pattern is matched, occurrences are substituted with the value set for *replace*. + + If option *before* is not set, the module will search to the end of the *target*. - Can be used in combination with *after*. + Option *before* is a regular expression as described in the `Python library `_. + + Option *before* can be used in combination with *after*. When combined with *after*, patterns are replaced or removed from *after* until the value set for *before*. + + Option *before* can be interpreted as a literal string instead of a regular expression by setting option *literal=before*. | **required**: False | **type**: str @@ -88,10 +104,17 @@ encoding literal - A list or string that allows the user to specify choices "before", "after", or "regexp" as regular strings instead of regex patterns. + If specified, it enables the module to interpret options *after*, *before* and *regexp* as a literal rather than a regular expression. + + Option *literal* uses any combination of V(after), V(before) and V(regexp). + + To interpret one option as a literal, use *literal=regexp*, *literal=after* or *literal=before*. + + To interpret multiple options as a literal, use a list such as ``['after', 'before']`` or ``['regex', 'after', 'before']`` | **required**: False | **type**: raw + | **default**: [] target @@ -140,18 +163,19 @@ Examples .. code-block:: yaml+jinja - - name: Replace with blank space on a USS file any occurrences of the regex + - name: Replace 'profile/' pattern in USS file via blank substitution. zos_replace: target: /tmp/src/somefile regexp: 'profile\/' - - name: Replace using after on USS file + - name: Replace regexp match with blank after line match in USS file. 
zos_replace: target: "/tmp/source" regexp: '^MOUNTPOINT*' after: export ZOAU_ROOT - - name: Replace a specific line with special character on a dataset after a line + - name: Replace a specific line with special character on a dataset after a line, treating the text specified + for regexp as a literal string and after as regular expression. zos_replace: target: SAMPLE.SOURCE regexp: //*LIB DD UNIT=SYS,SPACE=(TRK,(1,1)),VOL=SER=vvvvvv @@ -159,7 +183,16 @@ Examples after: '^\$source base \([^\s]+\)' literal: regexp - - name: Replace a specific line before a specific sentence with backup + - name: Replace a specific line with special character on a dataset after a line, treating the text specified + for regexp and after as regular expression. + zos_replace: + target: SAMPLE.SOURCE + regexp: '\ \*\*LIB\ \ DD\ UNIT=SYS,SPACE=\(TRK,\(1,1\)\),VOL=SER=vvvvvv' + replace: //*LIB DD UNIT=SYS,SPACE=(CYL,(1,1)) + after: '^\$source base \([^\s]+\)' + literal: regexp + + - name: Replace a specific line before a specific sentence with backup, treating the text specified for regexp and before as literal strings. zos_replace: target: SAMPLE.SOURCE backup: true @@ -169,7 +202,14 @@ Examples - regexp - before - - name: Replace some words between two lines with a backup with tmp_hlq + - name: Replace a specific line before a specific sentence with backup, treating the text specified for regexp and before as regular expression. + zos_replace: + target: SAMPLE.SOURCE + backup: true + regexp: '\ //SYSPRINT\ DD\ SYSOUT=\*' + before: '\ SAMPLES OUTPUT SYSIN\ \*\=\$DSN' + + - name: Replace 'var' with 'vars' between matched lines after and before with backup. zos_replace: target: SAMPLE.DATASET tmp_hlq: ANSIBLE @@ -180,7 +220,7 @@ Examples after: ^/tmp/source* before: ^ if* - - name: Replace lines on a GDS and generate a backup on the same GDG + - name: Replace lines on a GDS and generate a backup on the same GDG. 
zos_replace: target: SOURCE.GDG(0) regexp: ^(IEE132I|IEA989I|IEA888I|IEF196I|IEA000I)\s.* @@ -189,7 +229,7 @@ Examples backup: true backup_name: "SOURCE.GDG(+1)" - - name: Delete some calls to SYSTEM on a member using a backref + - name: Delete 'SYSTEM' calls via backref between matched lines in a PDS member. zos_replace: target: PDS.SOURCE(MEM) regexp: '^(.*?SYSTEM.*?)SYSTEM(.*)' @@ -232,7 +272,7 @@ changed .. code-block:: json - 1 + true found Number of matches found diff --git a/plugins/modules/zos_replace.py b/plugins/modules/zos_replace.py index 81934c1ae5..4955262964 100644 --- a/plugins/modules/zos_replace.py +++ b/plugins/modules/zos_replace.py @@ -26,9 +26,16 @@ options: after: description: - - If specified, only content after this match will be replaced/removed. - - Can be used in combination with I(before). + - A regular expression that, if specified, determines which content will be replaced or removed B(after) the match. + - Option I(after) is the start position from where the module will seek to match the I(regexp) pattern. + When a pattern is matched, occurrences are substituted with the value set for I(replace). + - If option I(after) is not set, the module will search from the beginning of the I(target). + - Option I(after) is a regular expression as described in the L(Python library,https://docs.python.org/3/library/re.html). + - Option I(after) can be used in combination with I(before). + When combined with I(before), patterns are replaced or removed from I(after) until the value set for I(before). + - Option I(after) can be interpreted as a literal string instead of a regular expression by setting option I(literal=after). required: false + default: '' type: str backup: description: @@ -63,9 +70,16 @@ type: str before: description: - - If specified, only content before this match will be replaced/removed. - - Can be used in combination with I(after). 
+ - A regular expression that if, specified, determines which content will be replaced or removed B(before) the match. + - Option I(before) is the end position from where the module will seek to match the I(regexp) pattern. + When a pattern is matched, occurrences are substituted with the value set for I(replace). + - If option I(before) is not set, the module will search to the end of the I(target). + - Option I(before) is a regular expression as described in the L(Python library,https://docs.python.org/3/library/re.html). + - Option I(before) can be used in combination with I(after). + When combined with I(after), patterns are replaced or removed from I(after) until the value set for I(before). + - Option I(before) can be interpreted as a literal string instead of a regular expression by setting option I(literal=before). required: false + default: '' type: str encoding: description: @@ -80,8 +94,12 @@ default: IBM-1047 literal: description: - - A list or string that allows the user to specify choices "before", "after", or "regexp" as regular strings instead of regex patterns. + - If specified, it enables the module to interpret options I(after), I(before) and I(regexp) as a literal rather than a regular expression. + - Option I(literal) uses any combination of V(after), V(before) and V(regexp). + - To interpret one option as a literal, use I(literal=regexp), I(literal=after) or I(literal=before). + - To interpret multiple options as a literal, use a list such as C(['after', 'before']) or C(['regex', 'after', 'before']). required: false + default: [] type: raw target: description: @@ -112,7 +130,7 @@ - If not set, matches are removed entirely. required: false type: str - default: "" + default: '' notes: - For supported character sets used to encode data, refer to the @@ -120,18 +138,19 @@ """ EXAMPLES = r""" -- name: Replace with blank space on a USS file any occurrences of the regex +- name: Replace 'profile/' pattern in USS file via blank substitution. 
zos_replace: target: /tmp/src/somefile regexp: 'profile\/' -- name: Replace using after on USS file +- name: Replace regexp match with blank after line match in USS file. zos_replace: target: "/tmp/source" regexp: '^MOUNTPOINT*' after: export ZOAU_ROOT -- name: Replace a specific line with special character on a dataset after a line +- name: Replace a specific line with special character on a dataset after a line, treating the text specified + for regexp as a literal string and after as regular expression. zos_replace: target: SAMPLE.SOURCE regexp: //*LIB DD UNIT=SYS,SPACE=(TRK,(1,1)),VOL=SER=vvvvvv @@ -139,7 +158,16 @@ after: '^\$source base \([^\s]+\)' literal: regexp -- name: Replace a specific line before a specific sentence with backup +- name: Replace a specific line with special character on a dataset after a line, treating the text specified + for regexp and after as regular expression. + zos_replace: + target: SAMPLE.SOURCE + regexp: '\ \*\*LIB\ \ DD\ UNIT=SYS,SPACE=\(TRK,\(1,1\)\),VOL=SER=vvvvvv' + replace: //*LIB DD UNIT=SYS,SPACE=(CYL,(1,1)) + after: '^\$source base \([^\s]+\)' + literal: regexp + +- name: Replace a specific line before a specific sentence with backup, treating the text specified for regexp and before as literal strings. zos_replace: target: SAMPLE.SOURCE backup: true @@ -149,7 +177,14 @@ - regexp - before -- name: Replace some words between two lines with a backup with tmp_hlq +- name: Replace a specific line before a specific sentence with backup, treating the text specified for regexp and before as regular expression. + zos_replace: + target: SAMPLE.SOURCE + backup: true + regexp: '\ //SYSPRINT\ DD\ SYSOUT=\*' + before: '\ SAMPLES OUTPUT SYSIN\ \*\=\$DSN' + +- name: Replace 'var' with 'vars' between matched lines after and before with backup. 
zos_replace: target: SAMPLE.DATASET tmp_hlq: ANSIBLE @@ -160,7 +195,7 @@ after: ^/tmp/source* before: ^ if* -- name: Replace lines on a GDS and generate a backup on the same GDG +- name: Replace lines on a GDS and generate a backup on the same GDG. zos_replace: target: SOURCE.GDG(0) regexp: ^(IEE132I|IEA989I|IEA888I|IEF196I|IEA000I)\s.* @@ -169,7 +204,7 @@ backup: true backup_name: "SOURCE.GDG(+1)" -- name: Delete some calls to SYSTEM on a member using a backref +- name: Delete 'SYSTEM' calls via backref between matched lines in a PDS member. zos_replace: target: PDS.SOURCE(MEM) regexp: '^(.*?SYSTEM.*?)SYSTEM(.*)' @@ -183,13 +218,13 @@ description: Name of the backup file or data set that was created. returned: if backup=true type: str - sample: /path/to/file.txt.2015-02-03@04:15 + sample: "/path/to/file.txt.2015-02-03@04:15" changed: description: Indicates if the source was modified. returned: always type: bool - sample: 1 + sample: True found: description: Number of matches found returned: success @@ -557,28 +592,28 @@ def replace_func(file, regexp, replace, module, uss, literal, encoding="cp1047", def run_module(): module = AnsibleModule( argument_spec=dict( - after=dict(type='str'), + after=dict(type='str', default=''), backup=dict(type='bool', default=False, required=False), backup_name=dict(type='str', default=None, required=False), - before=dict(type='str'), + before=dict(type='str', default=''), encoding=dict(type='str', default='IBM-1047', required=False), target=dict(type="str", required=True, aliases=['src', 'path', 'destfile']), tmp_hlq=dict(type='str', required=False, default=None), - literal=dict(type="raw", required=False, default=None), + literal=dict(type="raw", required=False, default=[]), regexp=dict(type="str", required=True), replace=dict(type='str', default=""), ), supports_check_mode=False ) args_def = dict( - after=dict(type='str'), + after=dict(type='str', default=''), backup=dict(type='bool', default=False, required=False), 
backup_name=dict(type='data_set_or_path', default=None, required=False), - before=dict(type='str'), + before=dict(type='str', default=''), encoding=dict(type='str', default='IBM-1047', required=False), target=dict(type="data_set_or_path", required=True, aliases=['src', 'path', 'destfile']), tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), - literal=dict(type=literals, required=False, default=None), + literal=dict(type=literals, required=False, default=[]), regexp=dict(type="str", required=True), replace=dict(type='str', default=""), ) @@ -708,6 +743,8 @@ def literals(contents, dependencies): allowed_values = {"after", "before", "regexp"} if not contents: return None + if contents == []: + return None if not isinstance(contents, list): contents = [contents] for val in contents: From cc682752da5ff81444362b1f7331a1c9a9693fb3 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Sat, 27 Sep 2025 23:45:06 +0530 Subject: [PATCH 66/73] resolving review comments --- plugins/modules/zos_started_task.py | 159 +++++++++--------- .../modules/test_zos_started_task_func.py | 1 + 2 files changed, 81 insertions(+), 79 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 793ddb1c78..b3981c464d 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -79,17 +79,14 @@ job_account: description: - Specifies accounting data in the JCL JOB statement for the started task. If the source JCL - was a job and has already accounting data, the value that is specified on this parameter - overrides the accounting data in the source JCL. + already had accounting data, the value that is specified on this parameter overrides it. - Only applicable when I(state) is C(started), otherwise ignored. 
required: false type: str job_name: description: - - When I(state) is started, I(job_name) is a name which should be assigned to a started task - while starting it. If I(job_name) is not specified, then I(member_name) is used as I(job_name). - Otherwise, I(job_name) is the started task job name used to find and apply the state - selected. + - When I(state) is started, this is the name which should be assigned to a started task + while starting it. If I(job_name) is not specified, then I(member_name) is used as job's name. - When I(state) is C(displayed), C(modified), C(cancelled), C(stopped), or C(forced), I(job_name) is the started task name. required: false @@ -158,7 +155,7 @@ subsystem: description: - The name of the subsystem that selects the task for processing. The name must be 1-4 - characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must + characters long, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - Only applicable when I(state) is C(started), otherwise ignored. required: false @@ -192,7 +189,7 @@ default: false wait_time: description: - - Option I(wait_time) is the total time that module zos_started_task will wait for a submitted task in centiseconds. + - Total time that the module will wait for a submitted task, measured in seconds. The time begins when the module is executed on the managed node. Default value of 0 means to wait the default amount of time supported by the opercmd utility. required: false @@ -211,37 +208,37 @@ description: Can run in check_mode and return changed status prediction without modifying target. If not supported, the action will be skipped. """ EXAMPLES = r""" -- name: Start a started task using member name. +- name: Start a started task using a member in a partitioned data set. zos_started_task: state: "started" member: "PROCAPP" -- name: Start a started task using member name and identifier. 
+- name: Start a started task using a member name and giving it an identifier. zos_started_task: state: "started" member: "PROCAPP" identifier: "SAMPLE" -- name: Start a started task using member name and job. +- name: Start a started task using both a member and a job name. zos_started_task: state: "started" member: "PROCAPP" job_name: "SAMPLE" -- name: Start a started task using member name, job and enable verbose. +- name: Start a started task and enable verbose output. zos_started_task: state: "started" member: "PROCAPP" job_name: "SAMPLE" verbose: True -- name: Start a started task using member name, subsystem and enable reuse asid. +- name: Start a started task specifying the subsystem and enabling a reusable ASID. zos_started_task: state: "started" member: "PROCAPP" subsystem: "MSTR" reus_asid: "YES" -- name: Display a started task using started task name. +- name: Display a started task using a started task name. zos_started_task: state: "displayed" task_name: "PROCAPP" -- name: Display started tasks using matching regex. +- name: Display all started tasks that begin with an s using a wildcard. zos_started_task: state: "displayed" task_name: "s*" @@ -249,31 +246,31 @@ zos_started_task: state: "displayed" task_name: "all" -- name: Cancel a started tasks using task name. +- name: Cancel a started task using task name. zos_started_task: state: "cancelled" task_name: "SAMPLE" -- name: Cancel a started tasks using task name and asid. +- name: Cancel a started task using it's task name and ASID. zos_started_task: state: "cancelled" task_name: "SAMPLE" asid: 0014 -- name: Cancel a started tasks using task name and asid. +- name: Modify a started task's parameters. zos_started_task: state: "modified" task_name: "SAMPLE" parameters: ["XX=12"] -- name: Stop a started task using task name. +- name: Stop a started task using it's task name. zos_started_task: state: "stopped" task_name: "SAMPLE" -- name: Stop a started task using task name, identifier and asid. 
+- name: Stop a started task using it's task name, identifier and ASID. zos_started_task: state: "stopped" task_name: "SAMPLE" identifier: "SAMPLE" asid: 00A5 -- name: Force a started task using task name. +- name: Force a started task using it's task name. zos_started_task: state: "forced" task_name: "SAMPLE" @@ -307,7 +304,7 @@ sample: 0 state: description: - - The final state of the started task, after execution.. + - The final state of the started task, after execution. returned: changed type: str sample: S SAMPLE @@ -382,13 +379,14 @@ - Central address of the data space ASTE. type: str sample: 058F2180 - dataspace_name: + data_space_name: description: - Data space name associated with the address space. type: str sample: CIRRGMAP domain_number: description: + - The z/OS system or sysplex domain where started task is running. - domain_number=N/A if the system is operating in goal mode. type: str sample: N/A @@ -397,17 +395,17 @@ - For address spaces other than system address spaces, the elapsed time since job select time. - For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. - For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. - elapsed_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. - sss.tttS when time is less than 1000 seconds - hh.mm.ss when time is at least 1000 seconds, but less than 100 hours - hhhhh.mm when time is at least 100 hours - ******** when time exceeds 100000 hours - NOTAVAIL when the TOD clock is not working + - elapsed_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. 
+ sss.tttS when time is less than 1000 seconds + hh.mm.ss when time is at least 1000 seconds, but less than 100 hours + hhhhh.mm when time is at least 100 hours + ******** when time exceeds 100000 hours + NOTAVAIL when the TOD clock is not working type: str sample: 812.983S priority: description: - - The priority of a started task is determined by the Workload Manager (WLM), based on the service class and importance assigned to it. + - Priority of a started task, as determined by the Workload Manager (WLM), based on the service class and importance assigned to it. type: str sample: 1 proc_step_name: @@ -426,6 +424,7 @@ sample: NO program_name: description: + - The name of the program(load module) that created or is running in the started task's address space. - program_name=N/A if the system is operating in goal mode. type: str sample: N/A @@ -437,7 +436,7 @@ sample: NO resource_group: description: - - The name of the resource group currently associated the service class. It can also be N/A if there is no resource group association. + - The name of the resource group currently associated with the service class. It can also be N/A if there is no resource group association. 
type: str sample: N/A server: @@ -576,7 +575,7 @@ def execute_display_command(started_task_name, timeout=0): list List contains extracted parameters from display command output of started task """ - cmd = "d a," + started_task_name + cmd = f"d a,{started_task_name}" display_response = opercmd.execute(cmd, timeout) task_params = [] if display_response.rc == 0 and display_response.stderr_response == "": @@ -603,7 +602,7 @@ def validate_and_prepare_start_command(module): identifier = module.params.get('identifier_name') job_name = module.params.get('job_name') job_account = module.params.get('job_account') - parameters = module.params.get('parameters') or [] + parameters = module.params.get('parameters', []) device_type = module.params.get('device_type') or "" device_number = module.params.get('device_number') or "" volume_serial = module.params.get('volume') or "" @@ -621,7 +620,7 @@ def validate_and_prepare_start_command(module): if job_account and len(job_account) > 55: module.fail_json( rc=5, - msg="job_account value should not exceed 55 characters.", + msg="The length of job_account exceeded 55 characters.", changed=False ) if device_number: @@ -629,13 +628,13 @@ def validate_and_prepare_start_command(module): if devnum_len not in (3, 5) or (devnum_len == 5 and not device_number.startswith("/")): module.fail_json( rc=5, - msg="Invalid device_number.", + msg="device_number should be 3 or 4 characters long and preceded by / when it is 4 characters long.", changed=False ) if subsystem_name and len(subsystem_name) > 4: module.fail_json( rc=5, - msg="The subsystem_name must be 1 - 4 characters.", + msg="The subsystem_name must be 1-4 characters long.", changed=False ) if keyword_parameters: @@ -645,13 +644,13 @@ def validate_and_prepare_start_command(module): if key_len > 44 or value_len > 44 or key_len + value_len > 65: module.fail_json( rc=5, - msg="The length of a keyword=option is exceeding 66 characters or length of an individual value is exceeding 44 
characters." + msg="The length of a keyword=option exceeded 66 characters or length of an individual value exceeded 44 characters." + "key:{0}, value:{1}".format(key, value), changed=False ) else: if keyword_parameters_string: - keyword_parameters_string = keyword_parameters_string + "," + f"{key}={value}" + keyword_parameters_string = f"{keyword_parameters_string},{key}={value}" else: keyword_parameters_string = f"{key}={value}" if job_name: @@ -659,17 +658,17 @@ def validate_and_prepare_start_command(module): elif member: started_task_name = member if identifier: - started_task_name = started_task_name + "." + identifier + started_task_name = f"{started_task_name}.{identifier}" else: module.fail_json( rc=5, - msg="member_name is missing which is mandatory.", + msg="member_name is missing which is mandatory to start a started task.", changed=False ) if not member: module.fail_json( rc=5, - msg="member_name is missing which is mandatory.", + msg="member_name is missing which is mandatory to start a started task.", changed=False ) if job_name and identifier: @@ -681,29 +680,29 @@ def validate_and_prepare_start_command(module): parameters_updated = "" if parameters: if len(parameters) == 1: - parameters_updated = "'" + parameters[0] + "'" + parameters_updated = f"'{parameters[0]}'" else: parameters_updated = f"({','.join(parameters)})" - cmd = 'S ' + member + cmd = f"S {member}" if identifier: - cmd = cmd + "." 
+ identifier + cmd = f"{cmd}.{identifier}" if parameters: - cmd = cmd + "," + device + "," + volume_serial + "," + parameters_updated + cmd = f"{cmd},{device},{volume_serial},{parameters_updated}" elif volume_serial: - cmd = cmd + "," + device + "," + volume_serial + cmd = f"{cmd},{device},{volume_serial}" elif device: - cmd = cmd + "," + device + cmd = f"{cmd},{device}" if job_name: - cmd = cmd + ",JOBNAME=" + job_name + cmd = f"{cmd},JOBNAME={job_name}" if job_account: - cmd = cmd + ",JOBACCT=" + job_account + cmd = f"{cmd},JOBACCT={job_account}" if subsystem_name: - cmd = cmd + ",SUB=" + subsystem_name + cmd = f"{cmd},SUB={subsystem_name}" if reus_asid: - cmd = cmd + ",REUSASID=" + reus_asid + cmd = f"{cmd},REUSASID={reus_asid}" if keyword_parameters_string: - cmd = cmd + "," + keyword_parameters_string + cmd = f"{cmd},{keyword_parameters_string}" return started_task_name, cmd @@ -728,14 +727,14 @@ def prepare_display_command(module): if job_name: started_task_name = job_name if identifier: - started_task_name = started_task_name + "." + identifier + started_task_name = f"{started_task_name}.{identifier}" else: module.fail_json( rc=5, - msg="job_name is missing which is mandatory.", + msg="job_name is missing which is mandatory to display started task details.", changed=False ) - cmd = 'D A,' + started_task_name + cmd = f"D A,{started_task_name}" return started_task_name, cmd @@ -761,16 +760,16 @@ def prepare_stop_command(module): if job_name: started_task_name = job_name if identifier: - started_task_name = started_task_name + "." 
+ identifier + started_task_name = f"{started_task_name}.{identifier}" else: module.fail_json( rc=5, - msg="job_name is missing which is mandatory.", + msg="job_name is missing which is mandatory to display started task details.", changed=False ) - cmd = 'P ' + started_task_name + cmd = f"P {started_task_name}" if asid: - cmd = cmd + ',A=' + asid + cmd = f"{cmd},A={asid}" return started_task_name, cmd @@ -796,20 +795,20 @@ def prepare_modify_command(module): if job_name: started_task_name = job_name if identifier: - started_task_name = started_task_name + "." + identifier + started_task_name = f"{started_task_name}.{identifier}" else: module.fail_json( rc=5, - msg="job_name is missing which is mandatory.", + msg="job_name is missing which is mandatory to display started task details.", changed=False ) if parameters is None: module.fail_json( rc=5, - msg="parameters are mandatory.", + msg="parameters are mandatory while modifying a started task.", changed=False ) - cmd = 'F ' + started_task_name + "," + ",".join(parameters) + cmd = f"F {started_task_name},{','.join(parameters)}" return started_task_name, cmd @@ -838,9 +837,9 @@ def prepare_cancel_command(module): if job_name: started_task_name = job_name if identifier: - started_task_name = started_task_name + "." 
+ identifier + started_task_name = f"{started_task_name}.{identifier}" elif userid: - started_task_name = "U=" + userid + started_task_name = f"U={userid}" else: module.fail_json( rc=5, @@ -853,13 +852,13 @@ def prepare_cancel_command(module): msg="The ARMRESTART parameter is not valid with the U=userid parameter.", changed=False ) - cmd = 'C ' + started_task_name + cmd = f"C {started_task_name}" if asid: - cmd = cmd + ',A=' + asid + cmd = f"{cmd},A={asid}" if dump: - cmd = cmd + ',DUMP' + cmd = f"{cmd},DUMP" if armrestart: - cmd = cmd + ',ARMRESTART' + cmd = f"{cmd},ARMRESTART" return started_task_name, cmd @@ -907,26 +906,26 @@ def prepare_force_command(module): if job_name: started_task_name = job_name if identifier: - started_task_name = started_task_name + "." + identifier + started_task_name = f"{started_task_name}.{identifier}" elif userid: - started_task_name = "U=" + userid + started_task_name = f"U={userid}" else: module.fail_json( rc=5, msg="Both job_name and userid are missing, one of them is needed to cancel a task.", changed=False ) - cmd = 'FORCE ' + started_task_name + cmd = f"FORCE {started_task_name}" if asid: - cmd = cmd + ',A=' + asid + cmd = f"{cmd},A={asid}" if arm: - cmd = cmd + ',ARM' + cmd = f"{cmd},ARM" if armrestart: - cmd = cmd + ',ARMRESTART' + cmd = f"{cmd},ARMRESTART" if tcb_address: - cmd = cmd + ',TCB=' + tcb_address + cmd = f"{cmd},TCB={tcb_address}" if retry: - cmd = cmd + ',RETRY=' + retry + cmd = f"{cmd},RETRY={retry}" return started_task_name, cmd @@ -958,7 +957,7 @@ def extract_keys(stdout): 'ASTE': 'data_space_address_entry', 'ADDR SPACE ASTE': 'address_space_second_table_entry', 'RGP': 'resource_group', - 'DSPNAME': 'dataspace_name', + 'DSPNAME': 'data_space_name', 'DMN': 'domain_number', 'AFF': 'affinity', 'SRVR': 'server', @@ -1310,6 +1309,7 @@ def run_module(): 'IEE535I': 'A parameter on a command is not valid.', 'IEE307I': 'Command parameter punctuation is incorrect or parameter is not followed by a blank.', 'ERROR': 
'Member is missing in PROCLIB or JCL is invalid or issue with JCL execution.', + 'NOT FOUND': 'Started task is not active', 'IEE341I': 'Started task is not active', 'REJECTED': 'Started task is not accepting modification.', 'IEE324I': 'The userid specified on the command is not currently active in the system..', @@ -1361,6 +1361,7 @@ def run_module(): is_failed = False system_logs = "" msg = "" + # Find failure found_msg = next((msg for msg in err_msg if msg in out), None) if err != "" or found_msg: is_failed = True @@ -1379,7 +1380,7 @@ def run_module(): if rc == 0: rc = 1 changed = False - msg = error_details[found_msg] + msg = error_details.get(found_msg, found_msg) stdout = out stderr = err if err == "" or err is None: diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index b0ec69e9ad..e7d461d4ce 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -778,6 +778,7 @@ def test_force_and_start_with_icsf_task(ansible_zos_module): task = "ICSF" ) for result in display_results.contacted.values(): + print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" From 8852bfa198ab82610841d59a36dde1db749642e6 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Sat, 27 Sep 2025 23:46:10 +0530 Subject: [PATCH 67/73] Update zos_started_task.rst --- docs/source/modules/zos_started_task.rst | 162 +++++++++++++---------- 1 file changed, 89 insertions(+), 73 deletions(-) diff --git a/docs/source/modules/zos_started_task.rst b/docs/source/modules/zos_started_task.rst index b711fc39f0..a8f9b8c31d 100644 --- a/docs/source/modules/zos_started_task.rst +++ b/docs/source/modules/zos_started_task.rst @@ -29,95 +29,95 @@ Parameters arm *arm* indicates to execute normal task termination routines without causing address space 
destruction. - Only applicable when state is forced, otherwise is ignored. + Only applicable when *state* is ``forced``, otherwise ignored. | **required**: False | **type**: bool armrestart - Indicates that the batch job or started task should be automatically restarted after the cancel completes, if it is registered as an element of the automatic restart manager. If the job or task is not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. + Indicates that the batch job or started task should be automatically restarted after CANCEL or FORCE completes, if it is registered as an element of the automatic restart manager. If the job or task is not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. - Only applicable when state is cancelled or forced, otherwise is ignored. + Only applicable when *state* is ``cancelled`` or ``forced``, otherwise ignored. | **required**: False | **type**: bool asid - When state is cancelled or stopped or forced, asid is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. + When *state* is ``cancelled``, ``stopped`` or ``forced``, *asid* is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. - Only applicable when state is stopped or cancelled or forced, otherwise is ignored. + Only applicable when *state* is ``stopped``, ``cancelled``, or ``forced``, otherwise ignored. | **required**: False | **type**: str device_type - Option device_type is the type of the output device (if any) associated with the task. + Type of the output device (if any) associated with the task. - Only applicable when state is started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: str device_number - Option device_number is the number of the device to be started. A device number is 3 or 4 hexadecimal digits. 
A slash (/) must precede a 4-digit number but is not before a 3-digit number. + Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. - Only applicable when state=started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: str dump - A dump is to be taken. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. + Whether to perform a dump. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. - Only applicable when state is cancelled otherwise ignored. + Only applicable when *state* is ``cancelled``, otherwise ignored. | **required**: False | **type**: bool identifier_name - Option identifier_name is the name that identifies the task. This name can be up to 8 characters long. The first character must be alphabetical. + Option *identifier_name* is the name that identifies the task. This name can be up to 8 characters long. The first character must be alphabetical. | **required**: False | **type**: str job_account - Option job_account specifies accounting data in the JCL JOB statement for the started task. If the source JCL was a job and has already accounting data, the value that is specified on this parameter overrides the accounting data in the source JCL. + Specifies accounting data in the JCL JOB statement for the started task. If the source JCL already had accounting data, the value that is specified on this parameter overrides it. - Only applicable when state is started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: str job_name - When state=started job_name is a name which should be assigned to a started task while starting it. If job_name is not specified, then member_name is used as job_name. 
Otherwise, job_name is the started task job name used to find and apply the state selected. + When *state* is started, this is the name which should be assigned to a started task while starting it. If *job_name* is not specified, then *member_name* is used as job's name. - When state is displayed or modified or cancelled or stopped or forced, job_name is the started task name. + When *state* is ``displayed``, ``modified``, ``cancelled``, ``stopped``, or ``forced``, *job_name* is the started task name. | **required**: False | **type**: str keyword_parameters - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option is 66 characters. No individual value within this field can be longer than 44 characters in length. + Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option pair is 66 characters. No individual value within this field can be longer than 44 characters in length. - Only applicable when state is started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: dict member_name - Option member_name is a 1 - 8 character name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. + Name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. - Only applicable when state is started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: str @@ -126,41 +126,41 @@ member_name parameters Program parameters passed to the started program. - Only applicable when state is started or modified otherwise ignored. 
+ Only applicable when *state* is ``started`` or ``modified``, otherwise ignored. | **required**: False | **type**: list | **elements**: str -retry - *retry* is applicable for only FORCE TCB. +retry_force + Indicates whether retry will be attempted on ABTER:ref:`abnormal termination `. - Only applicable when state= is forced otherwise ignored. + *tcb_address* is mandatory to use *retry_force*. + + Only applicable when *state* is ``forced``, otherwise ignored. | **required**: False - | **type**: str - | **choices**: YES, NO + | **type**: bool reus_asid - When REUSASID=YES is specified on the START command and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If REUSASID=YES is not specified on the START command or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + When *reus_asid* is ``True`` and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If *reus_asid* is not specified or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. - Only applicable when state is started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False - | **type**: str - | **choices**: YES, NO + | **type**: bool state *state* should be the desired state of the started task after the module is executed. - If state is started and the respective member is not present on the managed node, then error will be thrown with rc=1, changed=false and stderr which contains error details. + If *state* is ``started`` and the respective member is not present on the managed node, then error will be thrown with ``rc=1``, ``changed=false`` and *stderr* which contains error details. 
- If state is cancelled , modified, displayed, stopped or forced and the started task is not running on the managed node, then error will be thrown with rc=1, changed=false and stderr contains error details. + If *state* is ``cancelled``, ``modified``, ``displayed``, ``stopped`` or ``forced`` and the started task is not running on the managed node, then error will be thrown with ``rc=1``, ``changed=false`` and *stderr* contains error details. - If state is displayed and the started task is running, then the module will return the started task details along with changed=true. + If *state* is ``displayed`` and the started task is running, then the module will return the started task details along with ``changed=true``. | **required**: True | **type**: str @@ -168,27 +168,27 @@ state subsystem - The name of the subsystem that selects the task for processing. The name must be 1 - 4 characters, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + The name of the subsystem that selects the task for processing. The name must be 1-4 characters long, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. - Only applicable when state is started otherwise ignored. + Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: str tcb_address - *tcb_address* is a 6-digit hexadecimal TCB address of the task to terminate. + 6-digit hexadecimal TCB address of the task to terminate. - Only applicable when state is forced otherwise ignored. + Only applicable when *state* is ``forced``, otherwise ignored. | **required**: False | **type**: str -volume_serial - If devicetype is a tape or direct-access device, the volume serial number of the volume is mounted on the device. +volume + If *device_type* is a tape or direct-access device, the serial number of the volume, mounted on the device. - Only applicable when state is started otherwise ignored. 
+ Only applicable when *state* is ``started``, otherwise ignored. | **required**: False | **type**: str @@ -197,14 +197,14 @@ volume_serial userid The user ID of the time-sharing user you want to cancel or force. - Only applicable when state= is cancelled or forced , otherwise ignored. + Only applicable when *state* is ``cancelled`` or ``forced``, otherwise ignored. | **required**: False | **type**: str verbose - When verbose=true return system logs that describe the task execution. Using this option will can return a big response depending on system load, also it could surface other programs activity. + When ``verbose=true``, the module will return system logs that describe the task's execution. This option can return a big response depending on system load, also it could surface other program's activity. | **required**: False | **type**: bool @@ -212,7 +212,7 @@ verbose wait_time - Option wait_time is the total time that module zos_started_task will wait for a submitted task in centiseconds. The time begins when the module is executed on the managed node. Default value of 0 means to wait the default amount of time supported by the opercmd utility. + Total time that the module will wait for a submitted task, measured in seconds. The time begins when the module is executed on the managed node. Default value of 0 means to wait the default amount of time supported by the opercmd utility. | **required**: False | **type**: int @@ -241,37 +241,37 @@ Examples .. code-block:: yaml+jinja - - name: Start a started task using member name. + - name: Start a started task using a member in a partitioned data set. zos_started_task: state: "started" member: "PROCAPP" - - name: Start a started task using member name and identifier. + - name: Start a started task using a member name and giving it an identifier. zos_started_task: state: "started" member: "PROCAPP" identifier: "SAMPLE" - - name: Start a started task using member name and job. 
+ - name: Start a started task using both a member and a job name. zos_started_task: state: "started" member: "PROCAPP" job_name: "SAMPLE" - - name: Start a started task using member name, job and enable verbose. + - name: Start a started task and enable verbose output. zos_started_task: state: "started" member: "PROCAPP" job_name: "SAMPLE" verbose: True - - name: Start a started task using member name, subsystem and enable reuse asid. + - name: Start a started task specifying the subsystem and enabling a reusable ASID. zos_started_task: state: "started" member: "PROCAPP" subsystem: "MSTR" reus_asid: "YES" - - name: Display a started task using started task name. + - name: Display a started task using a started task name. zos_started_task: state: "displayed" task_name: "PROCAPP" - - name: Display started tasks using matching regex. + - name: Display all started tasks that begin with an s using a wildcard. zos_started_task: state: "displayed" task_name: "s*" @@ -279,31 +279,31 @@ Examples zos_started_task: state: "displayed" task_name: "all" - - name: Cancel a started tasks using task name. + - name: Cancel a started task using task name. zos_started_task: state: "cancelled" task_name: "SAMPLE" - - name: Cancel a started tasks using task name and asid. + - name: Cancel a started task using it's task name and ASID. zos_started_task: state: "cancelled" task_name: "SAMPLE" asid: 0014 - - name: Cancel a started tasks using task name and asid. + - name: Modify a started task's parameters. zos_started_task: state: "modified" task_name: "SAMPLE" parameters: ["XX=12"] - - name: Stop a started task using task name. + - name: Stop a started task using it's task name. zos_started_task: state: "stopped" task_name: "SAMPLE" - - name: Stop a started task using task name, identifier and asid. + - name: Stop a started task using it's task name, identifier and ASID. 
zos_started_task: state: "stopped" task_name: "SAMPLE" identifier: "SAMPLE" asid: 00A5 - - name: Force a started task using task name. + - name: Force a started task using it's task name. zos_started_task: state: "forced" task_name: "SAMPLE" @@ -352,7 +352,7 @@ rc | **type**: int state - The final state of the started task, after execution.. + The final state of the started task, after execution. | **returned**: changed | **type**: str @@ -363,7 +363,7 @@ stderr | **returned**: changed | **type**: str - | **sample**: An error has ocurred. + | **sample**: An error has occurred. stderr_lines List of strings containing individual lines from STDERR. @@ -375,7 +375,7 @@ stderr_lines .. code-block:: json [ - "An error has ocurred" + "An error has occurred" ] stdout @@ -436,7 +436,7 @@ tasks | **sample**: 000.008S dataspaces - The started task dataspaces details. + The started task data spaces details. | **returned**: success | **type**: list @@ -448,7 +448,7 @@ tasks | **type**: str | **sample**: 058F2180 - dataspace_name + data_space_name Data space name associated with the address space. | **type**: str @@ -456,6 +456,8 @@ tasks domain_number + The z/OS system or sysplex domain where started task is running. + domain_number=N/A if the system is operating in goal mode. | **type**: str @@ -466,13 +468,15 @@ tasks For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. - For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. elapsed_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. 
sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working + For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. + + elapsed_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working | **type**: str | **sample**: 812.983S priority - The priority of a started task is determined by the Workload Manager (WLM), based on the service class and importance assigned to it. + Priority of a started task, as determined by the Workload Manager (WLM), based on the service class and importance assigned to it. | **type**: str | **sample**: 1 @@ -497,6 +501,8 @@ tasks | **type**: str program_name + The name of the program(load module) that created or is running in the started task's address space. + program_name=N/A if the system is operating in goal mode. | **type**: str @@ -510,7 +516,7 @@ tasks | **type**: str resource_group - The name of the resource group currently associated the service class. It can also be N/A if there is no resource group association. + The name of the resource group currently associated with the service class. It can also be N/A if there is no resource group association. | **type**: str | **sample**: N/A @@ -548,9 +554,9 @@ tasks The name of a step that called a cataloged procedure. - STARTING if initiation of a started job, system task, or attached APPC transaction program is incomplete. + ``STARTING`` if initiation of a started job, system task, or attached APPC transaction program is incomplete. 
- MASTER* for the master address space. + ``*MASTER*`` for the master address space. The name of an initiator address space. @@ -564,23 +570,33 @@ tasks | **sample**: SAMPLE task_status - IN for swapped in. + ``IN`` for swapped in. - OUT for swapped out, ready to run. + ``OUT`` for swapped out, ready to run. - OWT for swapped out, waiting, not ready to run. + ``OWT`` for swapped out, waiting, not ready to run. - OU* for in process of being swapped out. + ``OU*`` for in process of being swapped out. - IN* for in process of being swapped in. + ``IN*`` for in process of being swapped in. - NSW for non-swappable. + ``NSW`` for non-swappable. | **type**: str | **sample**: NSW task_type - S for started task. + ``S`` for started task. + + ``A`` for an attached APPC transaction program. + + ``I`` for initiator address space. + + ``J`` for job + + ``M`` for mount + + ``*`` for system address space | **type**: str | **sample**: S From d44bcf7457acabc806e212ae5fa5ee304cacc2dc Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 29 Sep 2025 16:08:02 +0530 Subject: [PATCH 68/73] Updating doc changes --- plugins/modules/zos_started_task.py | 44 ++++++++++--------- .../modules/test_zos_started_task_func.py | 1 - 2 files changed, 24 insertions(+), 21 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index b3981c464d..60bc61d39b 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -305,31 +305,31 @@ state: description: - The final state of the started task, after execution. - returned: changed + returned: success type: str sample: S SAMPLE stderr: description: - The STDERR from the command, may be empty. - returned: changed + returned: failure type: str sample: An error has occurred. stderr_lines: description: - List of strings containing individual lines from STDERR. 
- returned: changed + returned: failure type: list sample: ["An error has occurred"] stdout: description: - The STDOUT from the command, may be empty. - returned: changed + returned: success type: str sample: ISF031I CONSOLE OMVS0000 ACTIVATED. stdout_lines: description: - List of strings containing individual lines from STDOUT. - returned: changed + returned: success type: list sample: ["Allocation to SYSEXEC completed."] tasks: @@ -503,9 +503,9 @@ sample: SYSTEM verbose_output: description: - - If C(verbose=true), the system log related to the started task executed state will be shown. - returned: changed - type: list + - If C(verbose=true), the system logs related to the started task executed state will be shown. + returned: success + type: str sample: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... """ @@ -534,10 +534,12 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals ---------- operator_cmd : str Operator command. + started_task_name : str + Name of the started task. + execute_display_before: bool + Indicates whether display command need to be executed before actual command or not. timeout_s : int Timeout to wait for the command execution, measured in centiseconds. - *args : dict - Arguments for the command. **kwargs : dict More arguments for the command. @@ -566,8 +568,8 @@ def execute_display_command(started_task_name, timeout=0): Parameters ---------- started_task_name : str - The name of started task. - timeout_s : int + Name of the started task. + timeout : int Timeout to wait for the command execution, measured in centiseconds. Returns @@ -588,7 +590,7 @@ def validate_and_prepare_start_command(module): Parameters ---------- - start_parms : dict + module : dict The started task start command parameters. Returns @@ -711,7 +713,7 @@ def prepare_display_command(module): Parameters ---------- - display_parms : dict + module : dict The started task display command parameters. 
Returns @@ -743,7 +745,7 @@ def prepare_stop_command(module): Parameters ---------- - stop_parms : dict + module : dict The started task stop command parameters. Returns @@ -778,7 +780,7 @@ def prepare_modify_command(module): Parameters ---------- - modify_parms : dict + module : dict The started task modify command parameters. Returns @@ -817,7 +819,7 @@ def prepare_cancel_command(module): Parameters ---------- - cancel_parms : dict + module : dict The started task modify command parameters. Returns @@ -996,7 +998,7 @@ def extract_keys(stdout): elif current_task: data_space = {} for match in kv_pattern.finditer(line): - dsp_keys = ['dataspace_name', 'data_space_address_entry'] + dsp_keys = ['data_space_name', 'data_space_address_entry'] key, value = match.groups() if key in keys: key = keys[key] @@ -1050,11 +1052,13 @@ def fetch_logs(command, timeout): ---------- command : string The comand which need to be checked in system logs + timeout: int + The timeout value passed in input. Returns ------- - list - The list of logs from SYSLOG + str + Logs from SYSLOG """ time_mins = timeout // 60 + 1 option = '-t' + str(time_mins) diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index e7d461d4ce..b0ec69e9ad 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -778,7 +778,6 @@ def test_force_and_start_with_icsf_task(ansible_zos_module): task = "ICSF" ) for result in display_results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" From b726c6ee4cc659d912a6456ef661e63cfb95c196 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Mon, 29 Sep 2025 23:55:54 +0530 Subject: [PATCH 69/73] Adding document updates --- docs/source/modules/zos_started_task.rst | 26 ++++++++++-------------- 
plugins/modules/zos_started_task.py | 4 ++-- 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/docs/source/modules/zos_started_task.rst b/docs/source/modules/zos_started_task.rst index a8f9b8c31d..b4346876fb 100644 --- a/docs/source/modules/zos_started_task.rst +++ b/docs/source/modules/zos_started_task.rst @@ -63,7 +63,7 @@ device_type device_number - Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but is not before a 3-digit number. + Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but not a 3-digit number. Only applicable when *state* is ``started``, otherwise ignored. @@ -354,21 +354,21 @@ rc state The final state of the started task, after execution. - | **returned**: changed + | **returned**: success | **type**: str | **sample**: S SAMPLE stderr The STDERR from the command, may be empty. - | **returned**: changed + | **returned**: failure | **type**: str | **sample**: An error has occurred. stderr_lines List of strings containing individual lines from STDERR. - | **returned**: changed + | **returned**: failure | **type**: list | **sample**: @@ -381,14 +381,14 @@ stderr_lines stdout The STDOUT from the command, may be empty. - | **returned**: changed + | **returned**: success | **type**: str | **sample**: ISF031I CONSOLE OMVS0000 ACTIVATED. stdout_lines List of strings containing individual lines from STDOUT. - | **returned**: changed + | **returned**: success | **type**: list | **sample**: @@ -430,7 +430,7 @@ tasks cpu_time The processor time used by the address space, including the initiator. This time does not include SRB time. - cpu_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. 
sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working + cpu_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working | **type**: str | **sample**: 000.008S @@ -609,13 +609,9 @@ tasks verbose_output - If ``verbose=true``, the system log related to the started task executed state will be shown. + If ``verbose=true``, the system logs related to the started task executed state will be shown. - | **returned**: changed - | **type**: list - | **sample**: - - .. code-block:: json - - "NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210...." + | **returned**: success + | **type**: str + | **sample**: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 60bc61d39b..502aa52b14 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -57,7 +57,7 @@ device_number: description: - Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must - precede a 4-digit number but is not before a 3-digit number. + precede a 4-digit number but not a 3-digit number. - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str @@ -359,7 +359,7 @@ cpu_time: description: - The processor time used by the address space, including the initiator. This time does not include SRB time. 
- - cpu_time has one of these below formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. + - cpu_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours From f52715bec9aa48c5eb3f5f24ab2ebafc121faa3c Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Thu, 9 Oct 2025 01:31:19 +0530 Subject: [PATCH 70/73] Added enhancements supporting task_id filtering tasks using timestamp when state is started commenting untested options. Adding testcases --- plugins/modules/zos_started_task.py | 569 ++++++++++-------- .../modules/test_zos_started_task_func.py | 493 +++++++++++---- 2 files changed, 687 insertions(+), 375 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 502aa52b14..68d8034e6d 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -41,26 +41,26 @@ - Only applicable when I(state) is C(cancelled) or C(forced), otherwise ignored. required: false type: bool - asid: + asidx: description: - - When I(state) is C(cancelled), C(stopped) or C(forced), I(asid) is the hexadecimal address space + - When I(state) is C(cancelled), C(stopped) or C(forced), I(asidx) is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. - Only applicable when I(state) is C(stopped), C(cancelled), or C(forced), otherwise ignored. required: false type: str - device_type: - description: - - Type of the output device (if any) associated with the task. - - Only applicable when I(state) is C(started), otherwise ignored. - required: false - type: str - device_number: - description: - - Number of the device to be started. 
A device number is 3 or 4 hexadecimal digits. A slash (/) must - precede a 4-digit number but not a 3-digit number. - - Only applicable when I(state) is C(started), otherwise ignored. - required: false - type: str +# device_type: +# description: +# - Type of the output device (if any) associated with the task. +# - Only applicable when I(state) is C(started), otherwise ignored. +# required: false +# type: str +# device_number: +# description: +# - Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must +# precede a 4-digit number but not a 3-digit number. +# - Only applicable when I(state) is C(started), otherwise ignored. +# required: false +# type: str dump: description: - Whether to perform a dump. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) @@ -119,13 +119,13 @@ required: false type: list elements: str - retry_force: - description: - - Indicates whether retry will be attempted on ABTERM(abnormal termination). - - I(tcb_address) is mandatory to use I(retry_force). - - Only applicable when I(state) is C(forced), otherwise ignored. - required: false - type: bool +# retry_force: +# description: +# - Indicates whether retry will be attempted on ABTERM(abnormal termination). +# - I(tcb_address) is mandatory to use I(retry_force). +# - Only applicable when I(state) is C(forced), otherwise ignored. +# required: false +# type: bool reus_asid: description: - When I(reus_asid) is C(True) and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned @@ -160,19 +160,25 @@ - Only applicable when I(state) is C(started), otherwise ignored. required: false type: str - tcb_address: - description: - - 6-digit hexadecimal TCB address of the task to terminate. - - Only applicable when I(state) is C(forced), otherwise ignored. - required: false - type: str - volume: + task_id: description: - - If I(device_type) is a tape or direct-access device, the serial number of the volume, - mounted on the device. 
- - Only applicable when I(state) is C(started), otherwise ignored. + - The started task id starts with STC. + - Only applicable when I(state) is C(displayed), C(modified), C(cancelled), C(stopped), or C(forced), otherwise ignored. required: false type: str +# tcb_address: +# description: +# - 6-digit hexadecimal TCB address of the task to terminate. +# - Only applicable when I(state) is C(forced), otherwise ignored. +# required: false +# type: str +# volume: +# description: +# - If I(device_type) is a tape or direct-access device, the serial number of the volume, +# mounted on the device. +# - Only applicable when I(state) is C(started), otherwise ignored. +# required: false +# type: str userid: description: - The user ID of the time-sharing user you want to cancel or force. @@ -238,6 +244,10 @@ zos_started_task: state: "displayed" task_name: "PROCAPP" +- name: Display a started task using a started task id. + zos_started_task: + state: "displayed" + task_id: "STC00012" - name: Display all started tasks that begin with an s using a wildcard. zos_started_task: state: "displayed" @@ -250,30 +260,47 @@ zos_started_task: state: "cancelled" task_name: "SAMPLE" +- name: Cancel a started task using a started task id. + zos_started_task: + state: "cancelled" + task_id: "STC00093" - name: Cancel a started task using it's task name and ASID. zos_started_task: state: "cancelled" task_name: "SAMPLE" - asid: 0014 + asidx: 0014 - name: Modify a started task's parameters. zos_started_task: state: "modified" task_name: "SAMPLE" parameters: ["XX=12"] +- name: Modify a started task's parameters using a started task id. + zos_started_task: + state: "modified" + task_id: "STC00034" + parameters: ["XX=12"] - name: Stop a started task using it's task name. zos_started_task: state: "stopped" task_name: "SAMPLE" +- name: Stop a started task using a started task id. 
+ zos_started_task: + state: "stopped" + task_id: "STC00087" - name: Stop a started task using it's task name, identifier and ASID. zos_started_task: state: "stopped" task_name: "SAMPLE" identifier: "SAMPLE" - asid: 00A5 + asidx: 00A5 - name: Force a started task using it's task name. zos_started_task: state: "forced" task_name: "SAMPLE" +- name: Force a started task using it's task id. + zos_started_task: + state: "forced" + task_id: "STC00065" """ RETURN = r""" @@ -298,7 +325,9 @@ description: - The return code is 0 when command executed successfully. - The return code is 1 when opercmd throws any error. + - The return code is 4 when task_id format is invalid. - The return code is 5 when any parameter validation failed. + - The return code is 8 when started task is not found using task_id. returned: changed type: int sample: 0 @@ -340,18 +369,7 @@ type: list elements: dict contains: - address_space_second_table_entry: - description: - - The control block used to manage memory for a started task - type: str - sample: 03E78500 - affinity: - description: - - The identifier of the processor, for up to any four processors, if the job requires the services of specific processors. - - affinity=NONE means the job can run on any processor. - type: str - sample: NONE - asid: + asidx: description: - Address space identifier (ASID), in hexadecimal. type: str @@ -367,29 +385,6 @@ NOTAVAIL when the TOD clock is not working type: str sample: 000.008S - dataspaces: - description: - - The started task data spaces details. - returned: success - type: list - elements: dict - contains: - data_space_address_entry: - description: - - Central address of the data space ASTE. - type: str - sample: 058F2180 - data_space_name: - description: - - Data space name associated with the address space. - type: str - sample: CIRRGMAP - domain_number: - description: - - The z/OS system or sysplex domain where started task is running. - - domain_number=N/A if the system is operating in goal mode. 
- type: str - sample: N/A elapsed_time: description: - For address spaces other than system address spaces, the elapsed time since job select time. @@ -403,63 +398,16 @@ NOTAVAIL when the TOD clock is not working type: str sample: 812.983S - priority: - description: - - Priority of a started task, as determined by the Workload Manager (WLM), based on the service class and importance assigned to it. - type: str - sample: 1 - proc_step_name: - description: - - For APPC-initiated transactions, the user ID requesting the transaction. - - The name of a step within a cataloged procedure that was called by the step specified in field sss. - - Blank, if there is no cataloged procedure. - - The identifier of the requesting transaction program. - type: str - sample: VLF - program_event_recording: - description: - - YES if A PER trap is active in the address space. - - NO if No PER trap is active in the address space. - type: str - sample: NO - program_name: - description: - - The name of the program(load module) that created or is running in the started task's address space. - - program_name=N/A if the system is operating in goal mode. - type: str - sample: N/A - queue_scan_count: - description: - - YES if the address space has been quiesced. - - NO if the address space is not quiesced. - type: str - sample: NO - resource_group: - description: - - The name of the resource group currently associated with the service class. It can also be N/A if there is no resource group association. - type: str - sample: N/A - server: - description: - - YES if the address space is a server. - - No if the address space is not a server. - type: str - sample: NO - started_class_list: - description: - - The name of the service class currently associated with the address space. - type: str - sample: SYSSTC started_time: description: - The time when the started task started. 
type: str sample: "2025-09-11 18:21:50.293644+00:00" - system_management_control: + task_id: description: - - Number of outstanding step-must-complete requests. + - The started task id. type: str - sample: 000 + sample: STC00018 task_identifier: description: - The name of a system address space. @@ -476,31 +424,6 @@ - The name of the started task. type: str sample: SAMPLE - task_status: - description: - - C(IN) for swapped in. - - C(OUT) for swapped out, ready to run. - - C(OWT) for swapped out, waiting, not ready to run. - - C(OU*) for in process of being swapped out. - - C(IN*) for in process of being swapped in. - - C(NSW) for non-swappable. - type: str - sample: NSW - task_type: - description: - - C(S) for started task. - - C(A) for an attached APPC transaction program. - - C(I) for initiator address space. - - C(J) for job - - C(M) for mount - - C(*) for system address space - type: str - sample: S - workload_manager: - description: - - The name of the workload currently associated with the address space. - type: str - sample: SYSTEM verbose_output: description: - If C(verbose=true), the system logs related to the started task executed state will be shown. @@ -522,12 +445,14 @@ ) try: - from zoautil_py import opercmd, zsystem + from zoautil_py import opercmd, zsystem, jobs except ImportError: - zoau_exceptions = ZOAUImportError(traceback.format_exc()) + opercmd = ZOAUImportError(traceback.format_exc()) + zsystem = ZOAUImportError(traceback.format_exc()) + jobs = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, started_task_name, execute_display_before=False, timeout_s=0, **kwargs): +def execute_command(operator_cmd, started_task_name, asidx, execute_display_before=False, timeout_s=0, **kwargs): """Execute operator command. Parameters @@ -536,6 +461,8 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals Operator command. started_task_name : str Name of the started task. 
+ asidx : string + The HEX adress space identifier. execute_display_before: bool Indicates whether display command need to be executed before actual command or not. timeout_s : int @@ -553,7 +480,7 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: timeout_c = 100 * timeout_s if execute_display_before: - task_params = execute_display_command(started_task_name) + task_params = execute_display_command(started_task_name, asidx) response = opercmd.execute(operator_cmd, timeout_c, **kwargs) rc = response.rc @@ -562,13 +489,17 @@ def execute_command(operator_cmd, started_task_name, execute_display_before=Fals return rc, stdout, stderr, task_params -def execute_display_command(started_task_name, timeout=0): +def execute_display_command(started_task_name, asidx=None, before_time=None, timeout=0): """Execute operator display command. Parameters ---------- started_task_name : str Name of the started task. + asidx : string + The HEX adress space identifier. + before_time: datetime + The timestamp when operation started. timeout : int Timeout to wait for the command execution, measured in centiseconds. @@ -581,7 +512,7 @@ def execute_display_command(started_task_name, timeout=0): display_response = opercmd.execute(cmd, timeout) task_params = [] if display_response.rc == 0 and display_response.stderr_response == "": - task_params = extract_keys(display_response.stdout_response) + task_params = extract_keys(display_response.stdout_response, asidx, before_time) return task_params @@ -708,6 +639,40 @@ def validate_and_prepare_start_command(module): return started_task_name, cmd +def fetch_task_name_and_asidx(module, task_id): + """Executes JLS command and fetches task name + + Parameters + ---------- + module : dict + The started task display command parameters. + task_id : str + The started task id starts with STC. + + Returns + ------- + task_name + The name of started task. 
+ """ + try: + task_details = jobs.fetch(task_id) + if not isinstance(task_details, jobs.Job): + module.fail_json( + rc=1, + msg=f"Fetching started task details using task_id: {task_id} is failed", + changed=False + ) + except Exception as err: + module.fail_json( + rc=err.response.rc, + msg=f"Fetching started task details using task_id: {task_id} is failed with ZOAU error: {err.response.stderr_response}", + changed=False + ) + task_name = task_details.name + asidx = f"{task_details.asid:04X}" + return task_name, asidx + + def prepare_display_command(module): """Validates parameters and creates display command @@ -725,28 +690,40 @@ def prepare_display_command(module): """ identifier = module.params.get('identifier_name') job_name = module.params.get('job_name') + task_id = module.params.get('task_id') started_task_name = "" - if job_name: + task_name = asidx = "" + if task_id: + task_name, asidx = fetch_task_name_and_asidx(module, task_id) + if task_name: + started_task_name = task_name + elif job_name: started_task_name = job_name if identifier: started_task_name = f"{started_task_name}.{identifier}" else: module.fail_json( rc=5, - msg="job_name is missing which is mandatory to display started task details.", + msg="either job_name or task_id is mandatory to display started task details.", changed=False ) cmd = f"D A,{started_task_name}" - return started_task_name, cmd + return started_task_name, asidx, cmd -def prepare_stop_command(module): +def prepare_stop_command(module, started_task=None, asidx=None, duplicate_tasks=False): """Validates parameters and creates stop command Parameters ---------- module : dict The started task stop command parameters. + started_task: string + The started task name. + asidx : string + The HEX adress space identifier. + duplicate_tasks: bool + Indicates if duplicate tasks are running. 
Returns ------- @@ -757,31 +734,35 @@ def prepare_stop_command(module): """ identifier = module.params.get('identifier_name') job_name = module.params.get('job_name') - asid = module.params.get('asid') + asidx = module.params.get('asidx') or asidx started_task_name = "" - if job_name: + if started_task: + started_task_name = started_task + elif job_name: started_task_name = job_name if identifier: started_task_name = f"{started_task_name}.{identifier}" else: module.fail_json( rc=5, - msg="job_name is missing which is mandatory to display started task details.", + msg="either job_name or task_id is mandatory to stop a running started task.", changed=False ) cmd = f"P {started_task_name}" - if asid: - cmd = f"{cmd},A={asid}" + if asidx or duplicate_tasks: + cmd = f"{cmd},A={asidx}" return started_task_name, cmd -def prepare_modify_command(module): +def prepare_modify_command(module, started_task=None): """Validates parameters and creates modify command Parameters ---------- module : dict The started task modify command parameters. + started_task: string + The started task name. 
 Returns
     -------
@@ -794,14 +775,16 @@
     job_name = module.params.get('job_name')
     parameters = module.params.get('parameters')
     started_task_name = ""
-    if job_name:
+    if started_task:
+        started_task_name = started_task
+    elif job_name:
         started_task_name = job_name
         if identifier:
             started_task_name = f"{started_task_name}.{identifier}"
     else:
         module.fail_json(
             rc=5,
-            msg="job_name is missing which is mandatory to display started task details.",
+            msg="either job_name or task_id is mandatory to modify a running started task.",
             changed=False
         )
     if parameters is None:
@@ -814,13 +797,19 @@
     return started_task_name, cmd
 
 
-def prepare_cancel_command(module):
+def prepare_cancel_command(module, started_task=None, asidx=None, duplicate_tasks=False):
     """Validates parameters and creates cancel command
 
     Parameters
     ----------
     module : dict
         The started task modify command parameters.
+    started_task: string
+        The started task name.
+    asidx : string
+        The HEX address space identifier.
+    duplicate_tasks: bool
+        Indicates if duplicate tasks are running.
 Returns
     -------
@@ -831,12 +820,14 @@
     """
     identifier = module.params.get('identifier_name')
     job_name = module.params.get('job_name')
-    asid = module.params.get('asid')
+    asidx = module.params.get('asidx') or asidx
     dump = module.params.get('dump')
     armrestart = module.params.get('armrestart')
     userid = module.params.get('userid')
     started_task_name = ""
-    if job_name:
+    if started_task:
+        started_task_name = started_task
+    elif job_name:
         started_task_name = job_name
         if identifier:
             started_task_name = f"{started_task_name}.{identifier}"
@@ -845,7 +836,7 @@
     else:
         module.fail_json(
             rc=5,
-            msg="Both job_name and userid are missing, one of them is needed to cancel a task.",
+            msg="job_name, task_id and userid are missing, one of them is needed to cancel a task.",
             changed=False
         )
     if userid and armrestart:
@@ -855,8 +846,8 @@
             changed=False
         )
     cmd = f"C {started_task_name}"
-    if asid:
-        cmd = f"{cmd},A={asid}"
+    if asidx or duplicate_tasks:
+        cmd = f"{cmd},A={asidx}"
     if dump:
         cmd = f"{cmd},DUMP"
     if armrestart:
@@ -864,13 +855,19 @@
     return started_task_name, cmd
 
 
-def prepare_force_command(module):
+def prepare_force_command(module, started_task=None, asidx=None, duplicate_tasks=False):
     """Validates parameters and creates force command
 
     Parameters
    ----------
     module : dict
         The started task force command parameters.
+    started_task: string
+        The started task name.
+    asidx : string
+        The HEX address space identifier.
+    duplicate_tasks: bool
+        Indicates if duplicate tasks are running.
 Returns
     -------
@@ -881,7 +878,7 @@
     identifier = module.params.get('identifier_name')
     job_name = module.params.get('job_name')
-    asid = module.params.get('asid')
+    asidx = module.params.get('asidx') or asidx
     arm = module.params.get('arm')
     armrestart = module.params.get('armrestart')
     userid = module.params.get('userid')
@@ -905,7 +902,9 @@
             msg="The ARMRESTART parameter is not valid with the U=userid parameter.",
             changed=False
         )
-    if job_name:
+    if started_task:
+        started_task_name = started_task
+    elif job_name:
         started_task_name = job_name
         if identifier:
             started_task_name = f"{started_task_name}.{identifier}"
@@ -914,12 +913,12 @@
     else:
         module.fail_json(
             rc=5,
-            msg="Both job_name and userid are missing, one of them is needed to cancel a task.",
+            msg="job_name, task_id and userid are missing, one of them is needed to force stop a running started task.",
             changed=False
         )
     cmd = f"FORCE {started_task_name}"
-    if asid:
-        cmd = f"{cmd},A={asid}"
+    if asidx or duplicate_tasks:
+        cmd = f"{cmd},A={asidx}"
     if arm:
         cmd = f"{cmd},ARM"
     if armrestart:
@@ -931,13 +930,17 @@
     return started_task_name, cmd
 
 
-def extract_keys(stdout):
+def extract_keys(stdout, asidx=None, before_time=None):
    """Extracts keys and values from the given stdout
 
     Parameters
     ----------
     stdout : string
         The started task display command output
+    asidx : string
+        The HEX address space identifier.
+    before_time: datetime
+        The timestamp when operation started.
 
     Returns
     -------
     tasks
         The list of task parameters.
""" keys = { - 'A': 'asid', + 'A': 'asidx', 'CT': 'cpu_time', 'ET': 'elapsed_time', - 'WUID': 'work_unit_identifier', - 'USERID': 'userid', - 'P': 'priority', - 'PER': 'program_event_recording', - 'SMC': 'system_management_control', - 'PGN': 'program_name', - 'SCL': 'started_class_list', - 'WKL': 'workload_manager', - 'ASTE': 'data_space_address_entry', - 'ADDR SPACE ASTE': 'address_space_second_table_entry', - 'RGP': 'resource_group', - 'DSPNAME': 'data_space_name', - 'DMN': 'domain_number', - 'AFF': 'affinity', - 'SRVR': 'server', - 'QSC': 'queue_scan_count' + 'WUID': 'task_id' } lines = stdout.strip().split('\n') tasks = [] @@ -979,46 +967,58 @@ def extract_keys(stdout): el_time = current_task.get('elapsed_time') if el_time: current_task['started_time'] = calculate_start_time(el_time) - tasks.append(current_task) + if asidx: + if asidx == current_task.get('asidx'): + tasks.append(current_task) + current_task = {} + break + elif before_time: + if before_time < datetime.fromisoformat(current_task.get('started_time')): + tasks.append(current_task) + else: + tasks.append(current_task) current_task = {} current_task['task_name'] = match_firstline.group(1) current_task['task_identifier'] = match_firstline.group(2) - if "=" not in match_firstline.group(5): - current_task['proc_step_name'] = match_firstline.group(3) - current_task['task_type'] = match_firstline.group(4) - current_task['task_status'] = match_firstline.group(5) - else: - current_task['task_type'] = match_firstline.group(3) - current_task['task_status'] = match_firstline.group(4) for match in kv_pattern.finditer(line): key, value = match.groups() if key in keys: key = keys[key] - current_task[key.lower()] = value + current_task[key.lower()] = value elif current_task: - data_space = {} for match in kv_pattern.finditer(line): - dsp_keys = ['data_space_name', 'data_space_address_entry'] key, value = match.groups() if key in keys: key = keys[key] - if key in dsp_keys: - data_space[key] = value - else: 
current_task[key.lower()] = value - if current_task.get("dataspaces"): - current_task["dataspaces"] = current_task["dataspaces"] + [data_space] - elif data_space: - current_task["dataspaces"] = [data_space] if current_task: el_time = current_task.get('elapsed_time') if el_time: current_task['started_time'] = calculate_start_time(el_time) - tasks.append(current_task) + if asidx: + if asidx == current_task.get('asidx'): + tasks.append(current_task) + elif before_time: + if before_time < datetime.fromisoformat(current_task.get('started_time')): + tasks.append(current_task) + else: + tasks.append(current_task) return tasks def parse_time(ts_str): + """Parse timestamp + + Parameters + ---------- + ts_str : string + The time stamp in string format + + Returns + ------- + timestamp + Transformed timestamp + """ # Case 1: Duration like "000.005seconds" sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) if sec_match: @@ -1082,6 +1082,13 @@ def run_module(): ------ fail_json z/OS started task operation failed. + + Note: + 5 arguments(device_number, device_type, volume, retry_force, tcb_address) are commented due to + not tested those values in positive scenarios. These options will be enabled after successful + testing. Below Git issues are created to track this. 
+ https://github.com/ansible-collections/ibm_zos_core/issues/2339 + https://github.com/ansible-collections/ibm_zos_core/issues/2340 """ module = AnsibleModule( argument_spec={ @@ -1098,18 +1105,18 @@ def run_module(): 'type': 'bool', 'required': False }, - 'asid': { - 'type': 'str', - 'required': False - }, - 'device_number': { - 'type': 'str', - 'required': False - }, - 'device_type': { + 'asidx': { 'type': 'str', 'required': False }, + # 'device_number': { + # 'type': 'str', + # 'required': False + # }, + # 'device_type': { + # 'type': 'str', + # 'required': False + # }, 'dump': { 'type': 'bool', 'required': False @@ -1143,10 +1150,10 @@ def run_module(): 'elements': 'str', 'required': False }, - 'retry_force': { - 'type': 'bool', - 'required': False - }, + # 'retry_force': { + # 'type': 'bool', + # 'required': False + # }, 'reus_asid': { 'type': 'bool', 'required': False @@ -1155,10 +1162,14 @@ def run_module(): 'type': 'str', 'required': False }, - 'tcb_address': { + 'task_id': { 'type': 'str', 'required': False }, + # 'tcb_address': { + # 'type': 'str', + # 'required': False + # }, 'userid': { 'type': 'str', 'required': False @@ -1168,10 +1179,10 @@ def run_module(): 'required': False, 'default': False }, - 'volume': { - 'type': 'str', - 'required': False - }, + # 'volume': { + # 'type': 'str', + # 'required': False + # }, 'wait_time': { 'type': 'int', 'required': False, @@ -1179,9 +1190,11 @@ def run_module(): } }, mutually_exclusive=[ - ['device_number', 'device_type'] + # ['device_number', 'device_type'], + ['job_name', 'task_id'], + ['identifier_name', 'task_id'] ], - required_by={'retry_force': ['tcb_address']}, + # required_by={'retry_force': ['tcb_address']}, supports_check_mode=True ) @@ -1198,18 +1211,18 @@ def run_module(): 'arg_type': 'bool', 'required': False }, - 'asid': { - 'arg_type': 'str', - 'required': False - }, - 'device_number': { - 'arg_type': 'str', - 'required': False - }, - 'device_type': { + 'asidx': { 'arg_type': 'str', 'required': 
False }, + # 'device_number': { + # 'arg_type': 'str', + # 'required': False + # }, + # 'device_type': { + # 'arg_type': 'str', + # 'required': False + # }, 'dump': { 'arg_type': 'bool', 'required': False @@ -1242,10 +1255,10 @@ def run_module(): 'elements': 'str', 'required': False }, - 'retry_force': { - 'arg_type': 'bool', - 'required': False - }, + # 'retry_force': { + # 'arg_type': 'bool', + # 'required': False + # }, 'reus_asid': { 'arg_type': 'bool', 'required': False @@ -1254,10 +1267,14 @@ def run_module(): 'arg_type': 'str', 'required': False }, - 'tcb_address': { - 'arg_type': 'str', + 'task_id': { + 'type': 'str', 'required': False }, + # 'tcb_address': { + # 'arg_type': 'str', + # 'required': False + # }, 'userid': { 'arg_type': 'str', 'required': False @@ -1266,10 +1283,10 @@ def run_module(): 'arg_type': 'bool', 'required': False }, - 'volume': { - 'arg_type': 'str', - 'required': False - }, + # 'volume': { + # 'arg_type': 'str', + # 'required': False + # }, 'wait_time': { 'arg_type': 'int', 'required': False @@ -1290,6 +1307,28 @@ def run_module(): wait_time_s = module.params.get('wait_time') verbose = module.params.get('verbose') kwargs = {} + # Fetch started task name if task_id is present in the request + task_id = module.params.get('task_id') + task_name = "" + asidx = module.params.get('asidx') + duplicate_tasks = False + started_task_name_from_id = "" + task_info = [] + if task_id and state != "displayed": + task_name, asidx = fetch_task_name_and_asidx(module, task_id) + task_params = execute_display_command(task_name) + if len(task_params) > 1: + duplicate_tasks = True + for task in task_params: + if task['asidx'] == asidx: + task_info.append(task) + started_task_name_from_id = f"{task['task_name']}.{task['task_identifier']}" + if not started_task_name_from_id: + module.fail_json( + rc=1, + msg="Started task of the given task_id is not active.", + changed=False + ) """ Below error messages or error codes are used to determine if response has 
any error. @@ -1330,38 +1369,42 @@ def run_module(): kwargs.update({"wait": True}) cmd = "" - + before_time = None execute_display_before = False execute_display_after = False if state == "started": + before_time = datetime.now().astimezone() err_msg = start_errmsg execute_display_after = True started_task_name, cmd = validate_and_prepare_start_command(module) elif state == "displayed": err_msg = display_errmsg - started_task_name, cmd = prepare_display_command(module) + started_task_name, asidx, cmd = prepare_display_command(module) elif state == "stopped": - execute_display_before = True + if not task_id: + execute_display_before = True err_msg = stop_errmsg - started_task_name, cmd = prepare_stop_command(module) + started_task_name, cmd = prepare_stop_command(module, started_task_name_from_id, asidx, duplicate_tasks) elif state == "cancelled": if not userid: - execute_display_before = True + if not task_id: + execute_display_before = True err_msg = cancel_errmsg - started_task_name, cmd = prepare_cancel_command(module) + started_task_name, cmd = prepare_cancel_command(module, started_task_name_from_id, asidx, duplicate_tasks) elif state == "forced": if not userid: - execute_display_before = True + if not task_id: + execute_display_before = True err_msg = force_errmsg - started_task_name, cmd = prepare_force_command(module) + started_task_name, cmd = prepare_force_command(module, started_task_name_from_id, asidx, duplicate_tasks) elif state == "modified": execute_display_after = True err_msg = modify_errmsg - started_task_name, cmd = prepare_modify_command(module) + started_task_name, cmd = prepare_modify_command(module, started_task_name_from_id) changed = False stdout = "" stderr = "" - rc, out, err, task_params = execute_command(cmd, started_task_name, execute_display_before, timeout_s=wait_time_s, **kwargs) + rc, out, err, task_params = execute_command(cmd, started_task_name, asidx, execute_display_before, timeout_s=wait_time_s, **kwargs) is_failed = False 
system_logs = "" msg = "" @@ -1396,9 +1439,9 @@ def run_module(): stdout = out stderr = err if state == "displayed": - task_params = extract_keys(out) + task_params = extract_keys(out, asidx) elif execute_display_after: - task_params = execute_display_command(started_task_name) + task_params = execute_display_command(started_task_name, asidx, before_time) result = dict() @@ -1409,7 +1452,7 @@ def run_module(): changed=changed, state=current_state, cmd=cmd, - tasks=task_params, + tasks=task_info if task_id else task_params, rc=rc, stdout=stdout, stderr=stderr, diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index b0ec69e9ad..f088db9dfe 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -29,15 +29,12 @@ //STDPARM DD * SH sleep 600 /*""" -PROC_JCL_CONTENT="""//TESTERS PROC -//TEST JOB MSGCLASS=A,NOTIFY=&SYSUID -//STEP1 EXEC PGM=BPXBATCH,PARM='SH' -//STDOUT DD SYSOUT=* -//STDERR DD SYSOUT=* -//STDPARM DD *,SYMBOLS=EXECSYS -SH sleep 60 -/* -//PEND""" +PARAM_JCL_CONTENT="""//MSLEEP PROC SECS=10 +//STEP1 EXEC PGM=BPXBATCH, +// PARM='SH sleep &SECS' +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" # Input arguments validation def test_start_task_with_invalid_member(ansible_zos_module): @@ -115,31 +112,31 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None -def test_start_task_with_invalid_devicenum(ansible_zos_module): - hosts = ansible_zos_module - # validate invalid devicenum with non-existing member - start_results = hosts.all.zos_started_task( - state = "started", - member_name = "SAMPLE", - device_number = "0870" - ) - for result in start_results.contacted.values(): - assert result.get("changed") is False - assert result.get("failed") is True - assert result.get("msg") is not None +# def 
test_start_task_with_invalid_devicenum(ansible_zos_module): +# hosts = ansible_zos_module +# # validate invalid devicenum with non-existing member +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# device_number = "0870" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("failed") is True +# assert result.get("msg") is not None -def test_start_task_with_invalid_volumeserial(ansible_zos_module): - hosts = ansible_zos_module - start_results = hosts.all.zos_started_task( - state = "started", - member_name = "SAMPLE", - volume = "12345A" - ) - for result in start_results.contacted.values(): - assert result.get("changed") is False - assert result.get("stderr") is not None - assert result.get("cmd") == "S SAMPLE,,12345A" - assert result.get("msg") is not None +# def test_start_task_with_invalid_volumeserial(ansible_zos_module): +# hosts = ansible_zos_module +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# volume = "12345A" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("stderr") is not None +# assert result.get("cmd") == "S SAMPLE,,12345A" +# assert result.get("msg") is not None def test_start_task_with_invalid_parameters(ansible_zos_module): hosts = ansible_zos_module @@ -157,27 +154,26 @@ def test_start_task_with_invalid_parameters(ansible_zos_module): start_results = hosts.all.zos_started_task( state = "started", member_name = "SAMPLE", - parameters = ["KEY1", "KEY2", "KEY3"], - volume = "123456" + parameters = ["KEY1", "KEY2", "KEY3"] ) for result in start_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None - assert result.get("cmd") == "S SAMPLE,,123456,(KEY1,KEY2,KEY3)" + assert result.get("cmd") == "S SAMPLE,,,(KEY1,KEY2,KEY3)" assert result.get("msg") is not None -def 
test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): - hosts = ansible_zos_module - start_results = hosts.all.zos_started_task( - state = "started", - member_name = "SAMPLE", - device_number = "/0870", - device_type = "TEST" - ) - for result in start_results.contacted.values(): - assert result.get("changed") is False - assert result.get("failed") is True - assert result.get("msg") is not None +# def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): +# hosts = ansible_zos_module +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# device_number = "/0870", +# device_type = "TEST" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("failed") is True +# assert result.get("msg") is not None def test_start_task_with_invalid_subsystem_negative(ansible_zos_module): @@ -235,19 +231,19 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): assert result.get("verbose_output") == "" -def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): - hosts = ansible_zos_module - start_results = hosts.all.zos_started_task( - state = "started", - member_name = "SAMPLE", - device_number = "/ABCD" - ) - for result in start_results.contacted.values(): - assert result.get("changed") is False - assert result.get("stderr") is not None - assert result.get("cmd") == 'S SAMPLE,/ABCD' - assert result.get("msg") is not None - assert result.get("verbose_output") == "" +# def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): +# hosts = ansible_zos_module +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# device_number = "/ABCD" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("stderr") is not None +# assert result.get("cmd") == 'S SAMPLE,/ABCD' +# assert 
result.get("msg") is not None +# assert result.get("verbose_output") == "" def test_display_task_negative(ansible_zos_module): hosts = ansible_zos_module @@ -341,7 +337,7 @@ def test_cancel_task_negative(ansible_zos_module): cancel_results = hosts.all.zos_started_task( state = "cancelled", - asid = "0012", + asidx = "0012", userid = "OMVSTEST", dump = True, verbose=True @@ -392,48 +388,48 @@ def test_force_task_negative(ansible_zos_module): assert result.get("failed") is True assert result.get("msg") is not None - force_results = hosts.all.zos_started_task( - state = "forced", - job_name = "TESTER", - retry_force = True - ) - for result in force_results.contacted.values(): - assert result.get("changed") is False - assert result.get("failed") is True - assert result.get("msg") is not None - force_results = hosts.all.zos_started_task( - state = "forced", - job_name = "TESTER", - tcb_address = "0006789", - retry_force = True - ) - for result in force_results.contacted.values(): - assert result.get("changed") is False - assert result.get("failed") is True - assert result.get("msg") is not None - force_results = hosts.all.zos_started_task( - state = "forced", - job_name = "TESTER", - identifier = "SAMPLE", - tcb_address = "000678", - retry_force = True - ) - for result in force_results.contacted.values(): - assert result.get("changed") is False - assert result.get("stderr") is not None - assert result.get("cmd") == "FORCE TESTER.SAMPLE,TCB=000678,RETRY=YES" - force_results = hosts.all.zos_started_task( - state = "forced", - userid = "OMVSTEST", - tcb_address = "000678", - retry_force = True, - verbose=True - ) - for result in force_results.contacted.values(): - assert result.get("changed") is False - assert result.get("stderr") is not None - assert result.get("cmd") == "FORCE U=OMVSTEST,TCB=000678,RETRY=YES" - assert result.get("verbose_output") != "" + # force_results = hosts.all.zos_started_task( + # state = "forced", + # job_name = "TESTER", + # retry_force = True + 
# ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("failed") is True + # assert result.get("msg") is not None + # force_results = hosts.all.zos_started_task( + # state = "forced", + # job_name = "TESTER", + # tcb_address = "0006789", + # retry_force = True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("failed") is True + # assert result.get("msg") is not None + # force_results = hosts.all.zos_started_task( + # state = "forced", + # job_name = "TESTER", + # identifier = "SAMPLE", + # tcb_address = "000678", + # retry_force = True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("stderr") is not None + # assert result.get("cmd") == "FORCE TESTER.SAMPLE,TCB=000678,RETRY=YES" + # force_results = hosts.all.zos_started_task( + # state = "forced", + # userid = "OMVSTEST", + # tcb_address = "000678", + # retry_force = True, + # verbose=True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("stderr") is not None + # assert result.get("cmd") == "FORCE U=OMVSTEST,TCB=000678,RETRY=YES" + # assert result.get("verbose_output") != "" def test_start_and_cancel_zos_started_task(ansible_zos_module): @@ -535,6 +531,18 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): job_account = job_account ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + start_results = hosts.all.zos_started_task( + state = "started", + member = "SAMPLE", + job_account = job_account + ) + for result in start_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 @@ -559,7 +567,19 @@ 
def test_start_and_cancel_zos_started_task(ansible_zos_module): stop_results = hosts.all.zos_started_task( state = "cancelled", task_name = "SAMPLE", - asid = asid_val, + asidx = asid_val, + verbose=True + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") != "" + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_name = "SAMPLE", verbose=True ) @@ -575,9 +595,9 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): hosts.all.shell( cmd="drm {0}".format(data_set_name) ) - hosts.all.shell( - cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) - ) + # hosts.all.shell( + # cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + # ) def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): try: @@ -661,7 +681,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): stop_results = hosts.all.zos_started_task( state = "stopped", task = "VLF", - asid = asid_val + asidx = asid_val ) for result in stop_results.contacted.values(): assert result.get("changed") is True @@ -794,19 +814,19 @@ def test_force_and_start_with_icsf_task(ansible_zos_module): assert result.get("stderr") != "" assert len(result.get("tasks")) > 0 - asid = result.get("tasks")[0].get("asid") + asidx = result.get("tasks")[0].get("asidx") force_results = hosts.all.zos_started_task( state = "forced", task = "ICSF", identifier = "ICSF", - asid = asid, + asidx = asidx, arm = True ) for result in force_results.contacted.values(): assert result.get("changed") is True assert result.get("rc") == 0 assert result.get("stderr") == "" - assert result.get("cmd") == f"FORCE ICSF.ICSF,A={asid},ARM" + assert result.get("cmd") == f"FORCE ICSF.ICSF,A={asidx},ARM" start_results = hosts.all.zos_started_task( state = "started", @@ -819,6 +839,255 @@ def test_force_and_start_with_icsf_task(ansible_zos_module): 
assert result.get("cmd") == "S ICSF" assert len(result.get("tasks")) > 0 +def test_start_with_keyword_param_and_cancel_zos_started_task(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(PARAM_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(MSLEEP)'\"".format(data_set_name, PROC_PDS) + ) + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is False + assert result.get("rc") == 1 + assert len(result.get("tasks")) == 0 + assert result.get("stderr") != "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "MSLEEP", + keyword_parameters = {"SECS": "60"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "MSLEEP", + keyword_parameters = {"SECS": "80"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 2 + assert result.get("stderr") == "" + + display_output = list(display_results.contacted.values())[0].get("stdout") + asid_val = 
re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task = "MSLEEP", + asidx = asid_val + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(MSLEEP)'".format(PROC_PDS) + ) +def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(PARAM_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(MSLEEP)'\"".format(data_set_name, PROC_PDS) + ) + display_results = hosts.all.zos_started_task( + state = "displayed", + task_id = "STCABCDEF" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is False + assert result.get("rc") == 4 + assert result.get("msg") != "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "MSLEEP", + keyword_parameters = {"SECS": "60"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + start_results = 
hosts.all.zos_started_task( + state = "started", + member = "MSLEEP", + keyword_parameters = {"SECS": "80"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 2 + assert result.get("stderr") == "" + + display_output = list(display_results.contacted.values())[0].get("stdout") + task_id = re.search(r"\bWUID=([^ \n\r\t]+)", display_output).group(1) + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_id = task_id + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(MSLEEP)'".format(PROC_PDS) + ) + +def test_stop_and_force_with_ICSF_task_using_task_id(ansible_zos_module): + hosts = ansible_zos_module + display_result = hosts.all.zos_started_task( + state = "displayed", + task = "ICSF" + ) + for result in display_result.contacted.values(): + task_id = result.get('tasks')[0]['task_id'] + asid_val = result.get('tasks')[0]['asidx'] + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert 
len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + stop_results = hosts.all.zos_started_task( + state = "stopped", + task_id = task_id + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"P ICSF.ICSF,A={asid_val}" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "ICSF" + ) + for result in start_results.contacted.values(): + task_id = result.get('tasks')[0]['task_id'] + asid_val = result.get('tasks')[0]['asidx'] + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + force_results = hosts.all.zos_started_task( + state = "forced", + task_id = task_id, + arm = True + ) + for result in force_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"FORCE ICSF.ICSF,A={asid_val},ARM" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "ICSF" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + # This testcase will be successful when a TSO session with user 'OMVSADM' is open. 
# def test_cancel_using_userid(ansible_zos_module): # hosts = ansible_zos_module From 1d6e151c6281bc6d65d5db9ecacfe374743aec23 Mon Sep 17 00:00:00 2001 From: Fernando Flores Date: Wed, 8 Oct 2025 14:26:19 -0600 Subject: [PATCH 71/73] Generated RSTs --- docs/source/modules/zos_apf.rst | 33 ++- docs/source/modules/zos_archive.rst | 58 +++-- docs/source/modules/zos_backup_restore.rst | 209 +++++++++++++++- docs/source/modules/zos_blockinfile.rst | 12 + docs/source/modules/zos_copy.rst | 28 +-- docs/source/modules/zos_data_set.rst | 30 +++ docs/source/modules/zos_encode.rst | 21 ++ docs/source/modules/zos_fetch.rst | 21 +- docs/source/modules/zos_find.rst | 40 ++-- docs/source/modules/zos_job_output.rst | 102 ++++---- docs/source/modules/zos_job_query.rst | 85 +++++-- docs/source/modules/zos_job_submit.rst | 128 +++++----- docs/source/modules/zos_lineinfile.rst | 22 +- docs/source/modules/zos_mount.rst | 51 ++-- docs/source/modules/zos_operator.rst | 34 ++- .../modules/zos_operator_action_query.rst | 42 ++-- docs/source/modules/zos_replace.rst | 2 +- docs/source/modules/zos_script.rst | 1 + docs/source/modules/zos_started_task.rst | 223 +++--------------- docs/source/modules/zos_unarchive.rst | 26 +- 20 files changed, 698 insertions(+), 470 deletions(-) diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 215de08519..8d06c883df 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -106,7 +106,7 @@ tmp_hlq persistent - Add/remove persistent entries to or from *data_set_name* + Add/remove persistent entries to or from *target* ``library`` will not be persisted or removed if ``persistent=None`` @@ -114,7 +114,7 @@ persistent | **type**: dict - data_set_name + target The data set name used for persisting or removing a ``library`` from the APF list. 
| **required**: True @@ -138,7 +138,7 @@ persistent backup - Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". + Creates a backup file or backup data set for *target*, including the timestamp information to ensure that you retrieve the original APF list defined in *target*". *backup_name* can be used to specify a backup file name if *backup=true*. @@ -152,7 +152,7 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *target* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. If the source is an MVS data set, the backup_name must be an MVS data set name. @@ -241,18 +241,18 @@ Examples library: SOME.SEQUENTIAL.DATASET force_dynamic: true persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence zos_apf: state: absent library: SOME.SEQUENTIAL.DATASET volume: T12345 persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Batch libraries with custom marker, persistence for the APF list zos_apf: persistent: - data_set_name: "SOME.PARTITIONED.DATASET(MEM)" + target: "SOME.PARTITIONED.DATASET(MEM)" marker: "/* {mark} PROG001 USR0010 */" batch: - library: SOME.SEQ.DS1 @@ -304,6 +304,12 @@ stdout | **returned**: always | **type**: str +stdout_lines + List of strings containing individual lines from STDOUT. 
+ + | **returned**: always + | **type**: list + stderr The error messages from ZOAU command apfadm @@ -311,6 +317,19 @@ stderr | **type**: str | **sample**: BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list. +stderr_lines + List of strings containing individual lines from STDERR. + + | **returned**: always + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list." + ] + rc The return code from ZOAU command apfadm diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 2a51654019..dc742455b8 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -55,7 +55,7 @@ format | **type**: dict - name + type The compression format to use. | **required**: False @@ -64,15 +64,15 @@ format | **choices**: bz2, gz, tar, zip, terse, xmit, pax - format_options + options Options specific to a compression format. | **required**: False | **type**: dict - terse_pack - Compression option for use with the terse format, *name=terse*. + spack + Compression option for use with the terse format, *type=terse*. Pack will compress records in a data set so that the output results in lossless data compression. @@ -81,8 +81,8 @@ format Spack will produce smaller output and take approximately 3 times longer than pack compression. | **required**: False - | **type**: str - | **choices**: pack, spack + | **type**: bool + | **default**: True xmit_log_data_set @@ -98,7 +98,7 @@ format | **type**: str - use_adrdssu + adrdssu If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. 
| **required**: False @@ -407,7 +407,7 @@ Examples src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: - name: tar + type: tar # Archive multiple files - name: Archive list of files into a zip @@ -417,7 +417,7 @@ Examples - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip format: - name: zip + type: zip # Archive one data set into terse - name: Archive data set into a terse @@ -425,7 +425,7 @@ Examples src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse # Use terse with different options - name: Archive data set into a terse, specify pack algorithm and use adrdssu @@ -433,10 +433,10 @@ Examples src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - terse_pack: "spack" - use_adrdssu: true + type: terse + options: + spack: true + adrdssu: true # Use a pattern to store - name: Archive data set pattern using xmit @@ -445,7 +445,7 @@ Examples exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit + type: xmit - name: Archive multiple GDSs into a terse zos_archive: @@ -455,25 +455,25 @@ Examples - "USER.GDG(-2)" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Archive multiple data sets into a new GDS zos_archive: src: "USER.ARCHIVE.*" dest: "USER.GDG(+1)" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Encode the source data set into Latin-1 before archiving into a terse data set zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -485,9 +485,9 @@ Examples - "USER.ARCHIVE2.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true encoding: from: IBM-1047 to: ISO8859-1 @@ -503,7 +503,7 
@@ Notes .. note:: This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + When packing and using ``adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. @@ -524,6 +524,12 @@ Return Values ------------- +dest + The remote absolute path or data set where the archive was created. + + | **returned**: always + | **type**: str + state The state of the input ``src``. diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 26fc889d37..d99e8a9247 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -31,6 +31,52 @@ Parameters ---------- +access + Specifies how the module will access data sets and z/OS UNIX files when performing a backup or restore operation. + + | **required**: False + | **type**: dict + + + share + Specifies that the module allow data set read access to other programs while backing up or restoring. + + *share* and ``full_volume`` are mutually exclusive; you cannot use both. + + Option *share*is conditionally supported for *operation=backup* or *operation=restore*. + + When *operation=backup*, and source backup is a VSAM data set, the option is only supported for VSAM data sets which are not defined with VSAM SHAREOPTIONS (1,3) or (1,4). 
- When *operation=restore*, and restore target is a VSAM data set or PDSE data set, this option is not supported. Both data set types will be accessed exclusively, preventing reading or writing to the VSAM, PDSE, or PDSE members. + + The SHAREOPTIONS for VSAM data sets. + + (1) the data set can be shared by multiple programs for read-only processing, or a single program for read and write processing. + + (2) the data set can be accessed by multiple programs for read-only processing, and can also be accessed by a program for write processing. + + (3) the data set can be shared by multiple programs where each program is responsible for maintaining both read and write data integrity. + + (4) the data set can be shared by multiple programs where each program is responsible for maintaining both read and write data integrity differing from (3) in that I/O buffers are updated for each request. + + | **required**: False + | **type**: bool + | **default**: False + + + auth + *auth=true* allows you to act as an administrator, where it will disable checking the current user's privileges for z/OS UNIX files, data sets and catalogs. + + This option is supported for both *operation=backup* and *operation=restore*. + + If you are not authorized to use this option, the module ends with an error message. + + Some authorization checking for data sets is unavoidable when *auth* is specified because some checks are initiated by services and programs invoked by this module which cannot be bypassed. + + | **required**: False + | **type**: bool + | **default**: False + + + operation Used to specify the operation to perform. @@ -150,26 +196,99 @@ overwrite | **default**: False -sms_storage_class - When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. +compress + When *operation=backup*, enables compression of partitioned data sets using system-level compression features.
If supported, this may utilize zEDC hardware compression. - When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. + This option can reduce the size of the temporary dataset generated during backup operations either before the AMATERSE step when *terse* is True or the resulting backup when *terse* is False. + + | **required**: False + | **type**: bool + | **default**: False - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + +terse + When *operation=backup*, executes an AMATERSE step to compress and pack the temporary data set for the backup. This creates a backup with a format suitable for transferring off-platform. + + If *operation=backup* and if *dataset=False* then option *terse* must be True. | **required**: False - | **type**: str + | **type**: bool + | **default**: True -sms_management_class - When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. +sms + Specifies how System Managed Storage (SMS) interacts with the storage class and management class when either backup or restore operations are occurring. - When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. + Storage class contains performance and availability attributes related to the storage occupied by the data set. A data set that has a storage class assigned to it is defined as an 'SMS-managed' data set. - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + Management class contains the data set attributes related to the migration and backup of the data set and the expiration date of the data set. 
A management class can be assigned only to a data set that also has a storage class assigned. | **required**: False - | **type**: str + | **type**: dict + + + storage_class + When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. + + When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. + + If neither *storage_class* nor *management_class* is specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + + | **required**: False + | **type**: str + + + management_class + When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. + + When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. + + If neither *storage_class* nor *management_class* is specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + + | **required**: False + | **type**: str + + + disable_automatic_class + Specifies that the automatic class selection (ACS) routines will not be used to determine the target data set class names for the provided list. + + The list must contain fully or partially qualified data set names. + + To include all selected data sets, add "**" to the list. + + You must have READ access to RACF FACILITY class profile `STGADMIN.ADR.RESTORE.BYPASSACS` to use this option. + + | **required**: False + | **type**: list + | **elements**: str + | **default**: [] + + + disable_automatic_storage_class + Specifies that automatic class selection (ACS) routines will not be used to determine the source data set storage class. + + Enabling *disable_automatic_storage_class* ensures ACS is null. + + *storage_class* and *disable_automatic_storage_class* are mutually exclusive; you cannot use both.
+ + The combination of *disable_automatic_storage_class* and ``disable_automatic_class=[dsn,dsn1,...]`` ensures the selected data sets will not be SMS-managed. + + | **required**: False + | **type**: bool + | **default**: False + + + disable_automatic_management_class + Specifies that automatic class selection (ACS) routines will not be used to determine the source data set management class. + + Enabling *disable_automatic_management_class* ensures ACS is null. + + *management_class* and *disable_automatic_management_class* are mutually exclusive; you cannot use both. + + | **required**: False + | **type**: bool + | **default**: False + + + space @@ -194,6 +313,7 @@ space_type | **required**: False | **type**: str + | **default**: m | **choices**: k, m, g, cyl, trk @@ -217,6 +337,18 @@ tmp_hlq | **type**: str + +index + When ``operation=backup``, specifies that for any VSAM cluster backup, the backup must also contain all the associated alternate index (AIX®) clusters and paths. + + When ``operation=restore``, specifies that for any VSAM cluster dumped with the SPHERE keyword, the module must also restore all associated AIX® clusters and paths. + + The alternate index is a VSAM function that allows logical records of a KSDS or ESDS to be accessed sequentially and directly by more than one key field. The cluster that has the data is called the base cluster. An alternate index cluster is then built from the base cluster. + + | **required**: False + | **type**: bool + | **default**: False + + + + Attributes @@ -266,6 +398,15 @@ Examples - user.gdg(0) backup_name: my.backup.dzp + - name: Backup datasets using compress + zos_backup_restore: + operation: backup + compress: true + terse: true + data_sets: + include: someds.name.here + backup_name: my.backup.dzp + - name: Backup all datasets matching the pattern USER.** to UNIX file /tmp/temp_backup.dzp, ignore recoverable errors.
zos_backup_restore: operation: backup @@ -357,8 +498,52 @@ Examples operation: restore volume: MYVOL2 backup_name: /tmp/temp_backup.dzp - sms_storage_class: DB2SMS10 - sms_management_class: DB2SMS10 + sms: + storage_class: DB2SMS10 + management_class: DB2SMS10 + + - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. + Disable SMS storage and management classes for all data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: /tmp/temp_backup.dzp + sms: + disable_automatic_class: + - "**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + + - name: Restore data sets from backup stored in the MVS file MY.BACKUP.DZP + Disable SMS storage and management classes for some data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: MY.BACKUP.DZP + sms: + disable_automatic_class: + - "ANSIBLE.TEST.**" + - "**.ONE.**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + + - name: Backup all data sets matching the pattern USER.VSAM.** to z/OS UNIX + file /tmp/temp_backup.dzp and ensure the VSAM alternate indexes are preserved. + zos_backup_restore: + operation: backup + data_sets: + include: user.vsam.** + backup_name: /tmp/temp_backup.dzp + index: true + + - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp + whether they exist or not and do so as authorized disabling any security checks.
+ zos_backup_restore: + operation: restore + backup_name: /tmp/temp_backup.dzp + access: + auth: true + share: true diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 041182ca10..a14d39efac 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -397,6 +397,18 @@ stderr | **type**: str | **sample**: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines + List of strings containing individual lines from stdout. + + | **returned**: failure + | **type**: list + +stderr_lines + List of strings containing individual lines from stderr. + + | **returned**: failure + | **type**: list + rc The return code from ZOAU dmod when json.loads() fails to parse the result from dmod diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 5fe5e565f5..96eb74e5a5 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -37,7 +37,7 @@ asa_text If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + This option is only valid for text files. If ``binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. | **required**: False | **type**: bool @@ -109,7 +109,7 @@ dest If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. - If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. 
If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. If ``src`` is a file and ``dest`` a partitioned data set, ``dest`` does not need to include a member in its value, the module can automatically compute the resulting member name from ``src``. @@ -136,7 +136,7 @@ encoding If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - Only valid if ``is_binary`` is false. + Only valid if ``binary`` is false. | **required**: False | **type**: dict @@ -166,7 +166,7 @@ tmp_hlq | **type**: str -force +replace If set to ``true`` and the remote file or data set ``dest`` is empty, the ``dest`` will be reused. If set to ``true`` and the remote file or data set ``dest`` is NOT empty, the ``dest`` will be deleted and recreated with the ``src`` data set attributes, otherwise it will be recreated with the ``dest`` data set attributes. @@ -182,12 +182,12 @@ force | **default**: False -force_lock - By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. 
Use ``force_lock`` to bypass DISP=SHR and continue with the copy operation. +force + By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force`` to bypass DISP=SHR and continue with the copy operation. If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. - Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. + Using ``force`` uses operations that are subject to race conditions and can lead to data loss, use with caution. If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. @@ -206,12 +206,12 @@ ignore_sftp_stderr | **default**: True -is_binary +binary If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. - When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. + When *binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. - Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. + Use *binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -794,7 +794,7 @@ Examples zos_copy: src: /path/to/binary/file dest: HLQ.SAMPLE.PDSE(MEMBER) - is_binary: true + binary: true - name: Copy a sequential data set to a PDS member zos_copy: @@ -820,14 +820,14 @@ Examples src: HLQ.SAMPLE.PDSE dest: HLQ.EXISTING.PDSE remote_src: true - force: true + replace: true - name: Copy PDS member to a new PDS member. 
Replace if it already exists zos_copy: src: HLQ.SAMPLE.PDSE(SRCMEM) dest: HLQ.NEW.PDSE(DESTMEM) remote_src: true - force: true + replace: true - name: Copy a USS file to a PDSE member. If PDSE does not exist, allocate it zos_copy: @@ -1118,7 +1118,7 @@ state note A note to the user after module terminates. - | **returned**: When ``force=true`` and ``dest`` exists + | **returned**: When ``replace=true`` and ``dest`` exists | **type**: str | **sample**: No data was copied diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 1f8b6e9c25..6d76dfaf92 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -311,6 +311,18 @@ scratch | **default**: False +noscratch + When ``state=absent``, specifies whether to keep the data set's entry in the VTOC. + + If ``noscratch=True``, the data set is uncataloged but not physically removed from the volume. The Data Set Control Block is not removed from the VTOC. + + This is the equivalent of using ``NOSCRATCH`` in an ``IDCAMS DELETE`` command. + + | **required**: False + | **type**: bool + | **default**: False + + volumes If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. @@ -598,6 +610,18 @@ batch | **default**: False + noscratch + When ``state=absent``, specifies whether to keep the data set's entry in the VTOC. + + If ``noscratch=True``, the data set is uncataloged but not physically removed from the volume. The Data Set Control Block is not removed from the VTOC. + + This is the equivalent of using ``NOSCRATCH`` in an ``IDCAMS DELETE`` command. + + | **required**: False + | **type**: bool + | **default**: False + + extended Sets the *extended* attribute for Generation Data Groups. @@ -783,6 +807,12 @@ Examples name: someds.name.here state: absent + - name: Uncatalog a data set but do not remove it from the volume. 
+ zos_data_set: + name: someds.name.here + state: absent + noscratch: true + - name: Delete a data set if it exists. If data set not cataloged, check on volume 222222 for the data set, and then catalog and delete if found. zos_data_set: name: someds.name.here diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 4a5e61f798..b570c5ed8c 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -333,3 +333,24 @@ backup_name | **type**: str | **sample**: /path/file_name.2020-04-23-08-32-29-bak.tar +encoding + Specifies which encodings the destination file or data set was converted from and to. + + | **returned**: always + | **type**: dict + + from + The character set of the source *src*. + + | **returned**: always + | **type**: str + | **sample**: IBM-1047 + + to + The destination *dest* character set for the output that was written as. + + | **returned**: always + | **type**: str + | **sample**: ISO8859-1 + + diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 8a341dfcdc..a9b6a01926 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -74,7 +74,7 @@ flat | **default**: false -is_binary +binary Specifies if the file being fetched is a binary. | **required**: False @@ -173,7 +173,7 @@ Examples src: SOME.PDS.DATASET dest: /tmp/ flat: true - is_binary: true + binary: true - name: Fetch a UNIX file and don't validate its checksum zos_fetch: @@ -257,9 +257,11 @@ Return Values ------------- -file +src The source file path or data set on the remote machine. + If the source is not found, then src will be empty. + | **returned**: success | **type**: str | **sample**: SOME.DATA.SET @@ -271,7 +273,7 @@ dest | **type**: str | **sample**: /tmp/SOME.DATA.SET -is_binary +binary Indicates the transfer mode that was used to fetch. 
| **returned**: success @@ -296,17 +298,10 @@ data_set_type | **type**: str | **sample**: PDSE -note - Notice of module failure when ``fail_on_missing`` is false. - - | **returned**: failure and fail_on_missing=false - | **type**: str - | **sample**: The data set USER.PROCLIB does not exist. No data was fetched. - msg - Message returned on failure. + Any important messages from the module. - | **returned**: failure + | **returned**: always | **type**: str | **sample**: The source 'TEST.DATA.SET' does not exist or is uncataloged. diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index 1c3d5222c1..3e8e5e4757 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -68,6 +68,8 @@ excludes If the pattern is a regular expression, it must match the full data set name. + To exclude members, the regular expression or pattern must be enclosed in parentheses. This expression can be used alongside a pattern to exclude data set names. + | **required**: False | **type**: list | **elements**: str @@ -80,8 +82,6 @@ patterns This parameter expects a list, which can be either comma separated or YAML. - If ``pds_patterns`` is provided, ``patterns`` must be member patterns. - When searching for members within a PDS/PDSE, pattern can be a regular expression. | **required**: True @@ -102,18 +102,6 @@ size | **type**: str -pds_patterns - List of PDS/PDSE to search. Wildcard is possible. - - Required when searching for data set members. - - Valid only for ``nonvsam`` resource types. Otherwise ignored. - - | **required**: False - | **type**: list - | **elements**: str - - resource_type The types of resources to search. @@ -228,6 +216,22 @@ Examples .. 
code-block:: yaml+jinja + - name: Exclude all members starting with characters 'TE' in a given list of data set patterns + zos_find: + excludes: '(^te.*)' + patterns: + - IMSTEST.TEST.* + - IMSTEST.USER.* + - USER.*.LIB + + - name: Exclude data sets that include 'DATA' and members starting with characters 'MEM' in a given list of data set patterns + zos_find: + excludes: '^.*DATA.*(^MEM.*)' + patterns: + - IMSTEST.*.TEST + - IMSTEST.*.* + - USER.*.LIB + - name: Find all data sets with HLQ 'IMS.LIB' or 'IMSTEST.LIB' that contain the word 'hello' zos_find: patterns: @@ -248,14 +252,6 @@ Examples contains: 'hello' excludes: '.*TEST' - - name: Find all members starting with characters 'TE' in a given list of PDS patterns - zos_find: - patterns: '^te.*' - pds_patterns: - - IMSTEST.TEST.* - - IMSTEST.USER.* - - USER.*.LIB - - name: Find all data sets greater than 2MB and allocated in one of the specified volumes zos_find: patterns: 'USER.*' diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index c58610d44e..1f0591818a 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -21,7 +21,8 @@ Synopsis - The job id can be specific such as "STC02560", or one that uses a pattern such as "STC*" or "*". - The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP*" or "*". - The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". -- If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. +- If there is no dd_name, or if dd_name="?", output of all the dds under the given job will be displayed. +- If SYSIN DDs are needed, *sysin_dd* should be set to ``true``. @@ -52,13 +53,21 @@ owner | **type**: str -ddname +dd_name Data definition name (show only this DD on a found job). (e.g "JESJCL", "?") | **required**: False | **type**: str +sysin_dd + Whether to include SYSIN DDs as part of the output.
+ + | **required**: False + | **type**: bool + | **default**: False + + Attributes @@ -81,21 +90,26 @@ Examples .. code-block:: yaml+jinja - - name: Job output with ddname + - name: Job output with dd_name zos_job_output: job_id: "STC02560" - ddname: "JESMSGLG" + dd_name: "JESMSGLG" - - name: JES Job output without ddname + - name: JES Job output without dd_name zos_job_output: job_id: "STC02560" - - name: JES Job output with all ddnames + - name: JES Job output with all dd_name zos_job_output: job_id: "STC*" job_name: "*" owner: "IBMUSER" - ddname: "?" + dd_name: "?" + + - name: Query a job's output including SYSIN DDs + zos_job_output: + job_id: "JOB00548" + sysin_dd: true @@ -125,7 +139,7 @@ jobs "class": "R", "content_type": "JOB", "cpu_time": 1414, - "ddnames": [ + "dds": [ { "byte_count": "775", "content": [ @@ -147,7 +161,7 @@ jobs "- 6 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "17", @@ -171,7 +185,7 @@ jobs " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "14", @@ -200,7 +214,7 @@ jobs " IEF033I JOB/HELLO /STOP 2020049.1025 ", " CPU: 0 HR 00 MIN 00.00 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "19", @@ -214,7 +228,7 @@ jobs " ", " PROCESSING ENDED AT EOD " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "", "record_count": "4", @@ -225,7 +239,7 @@ jobs "content": [ " HELLO, WORLD " ], - "ddname": "SYSUT2", + "dd_name": "SYSUT2", "id": "103", "procstep": "", "record_count": "1", @@ -247,14 +261,14 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" }, + "steps": [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ], "subsystem": "STL1", "system": "STL1" } @@ 
-331,13 +345,13 @@ jobs | **type**: str | **sample**: 00:00:10 - ddnames + dds Data definition names. | **type**: list | **elements**: dict - ddname + dd_name Data definition name. | **type**: str @@ -374,7 +388,7 @@ jobs | **sample**: 574 content - The ddname content. + The dd content. | **type**: list | **elements**: str @@ -448,13 +462,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" } } @@ -481,29 +489,39 @@ jobs | **type**: int - steps - Series of JCL steps that were executed and their return codes. - | **type**: list - | **elements**: dict + steps + Series of JCL steps that were executed and their return codes. + + | **type**: list + | **elements**: dict + | **sample**: - step_name - Name of the step shown as "was executed" in the DD section. + .. code-block:: json - | **type**: str - | **sample**: STEP0001 + [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ] - step_cc - The CC returned for this step in the DD section. + step_name + Name of the step shown as "was executed" in the DD section. - | **type**: int + | **type**: str + | **sample**: STEP0001 + step_cc + The CC returned for this step in the DD section. + + | **type**: int changed Indicates if any changes were made during module operation - | **returned**: on success + | **returned**: always | **type**: bool diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 38cea61e34..94f3a934ad 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -138,6 +138,11 @@ changed | **returned**: always | **type**: bool + | **sample**: + + .. code-block:: json + + true jobs The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. 
@@ -158,16 +163,28 @@ jobs "creation_time": "12:13:00", "execution_node": "STL1", "execution_time": "00:00:02", - "job_class": "K", + "job_class": "STC", "job_id": "JOB01427", "job_name": "LINKJOB", "origin_node": "STL1", "owner": "ADMIN", "priority": 1, + "program_name": "BPXBATCH", "queue_position": 3, - "ret_code": "null", + "ret_code": { + "code": "0", + "msg": "CC", + "msg_code": "0000", + "msg_txt": "CC" + }, + "steps": [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ], "subsystem": "STL1", - "svc_class": "?", + "svc_class": "null", "system": "STL1" }, { @@ -184,11 +201,15 @@ jobs "origin_node": "STL1", "owner": "ADMIN", "priority": 0, + "program_name": "null", "queue_position": 0, "ret_code": { "code": "null", - "msg": "CANCELED" + "msg": "CANCELED", + "msg_code": "null", + "msg_txt": "CANCELED" }, + "steps": [], "subsystem": "STL1", "svc_class": "E", "system": "STL1" @@ -243,11 +264,11 @@ jobs | **type**: str | **sample**: STL1 - cpu_time - Sum of the CPU time used by each job step, in microseconds. + origin_node + Origin node that submitted the job. - | **type**: int - | **sample**: 5 + | **type**: str + | **sample**: STL1 execution_node Execution node that picked the job and executed it. @@ -255,11 +276,11 @@ jobs | **type**: str | **sample**: STL1 - origin_node - Origin node that submitted the job. + cpu_time + Sum of the CPU time used by each job step, in microseconds. - | **type**: str - | **sample**: STL1 + | **type**: int + | **sample**: 5 ret_code Return code output collected from job log. @@ -274,13 +295,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" } } @@ -326,6 +341,36 @@ jobs + steps + Series of JCL steps that were executed and their return codes. + + | **type**: list + | **elements**: dict + | **sample**: + + .. 
code-block:: json + + { + "steps": [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ] + } + + step_name + Name of the step shown as "was executed" in the DD section. + + | **type**: str + | **sample**: STEP0001 + + step_cc + The CC returned for this step in the DD section. + + | **type**: int + + job_class Job class for this job. @@ -380,7 +425,7 @@ jobs | **sample**: 00:00:10 -message +msg Message returned on failure. | **returned**: failure diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 6d31b6abd0..fcb653f705 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -43,27 +43,24 @@ src | **type**: str -location - The JCL location. Supported choices are ``data_set``, ``uss`` or ``local``. +remote_src + If set to ``false``, the module searches for ``src`` in the controller node. - ``data_set`` can be a PDS, PDSE, sequential data set, or a generation data set. - - ``uss`` means the JCL location is located in UNIX System Services (USS). - - ``local`` means locally to the Ansible control node. + If set to ``true``, the module searches for the file ``src`` in the managed node. | **required**: False - | **type**: str - | **default**: data_set - | **choices**: data_set, uss, local + | **type**: bool + | **default**: True + +wait_time + Option *wait_time* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. -wait_time_s - Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. + *wait_time* is measured in seconds and must be a value greater than 0 and less than 86400. - *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. 
+ The module can submit and forget jobs by setting *wait_time* to 0. This way the module will not try to retrieve the job details other than job id. Job details and contents can be retrieved later by using `zos_job_query <./zos_job_query.html>`_ or `zos_job_output <./zos_job_output.html>`_ if needed. - The module can submit and forget jobs by setting *wait_time_s* to 0. This way the module will not try to retrieve the job details other than job id. Job details and contents can be retrieved later by using `zos_job_query <./zos_job_query.html>`_ or `zos_job_output <./zos_job_output.html>`_ if needed. + If *remote_src=False* and *wait_time=0*, the module will not clean the copy of the file on the remote system, to avoid problems with job submission. | **required**: False | **type**: int @@ -80,7 +77,7 @@ max_rc return_output Whether to print the DD output. - If false, an empty list will be returned in the ddnames field. + If false, an empty list will be returned in the dds field. | **required**: False | **type**: bool @@ -92,7 +89,7 @@ volume When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for *location=uss* and *location=local*. + Ignored for *remote_src=False*. | **required**: False | **type**: str @@ -101,7 +98,7 @@ volume encoding Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - This option is only supported for when *location=local*. + This option is only supported for when *remote_src=False*. If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding. 
@@ -253,6 +250,7 @@ template_parameters | **default**: \\n | **choices**: \\n, \\r, \\r\\n + auto_reload Whether to reload a template file when it has changed after the task has started. @@ -295,19 +293,19 @@ Examples - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: uss + remote_src: true return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: local + remote_src: false encoding: from: ISO8859-1 to: IBM-037 @@ -315,36 +313,36 @@ Examples - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: data_set + remote_src: true max_rc: 16 - name: Submit JCL from the latest generation data set in a generation data group. zos_job_submit: src: HLQ.DATA.GDG(0) - location: data_set + remote_src: true - name: Submit JCL from a previous generation data set in a generation data group. 
zos_job_submit: src: HLQ.DATA.GDG(-2) - location: data_set + remote_src: true @@ -380,12 +378,11 @@ jobs [ { "asid": 0, - "class": "K", "content_type": "JOB", "cpu_time": 1, "creation_date": "2023-05-03", "creation_time": "12:13:00", - "ddnames": [ + "dds": [ { "byte_count": "677", "content": [ @@ -406,7 +403,7 @@ jobs "- 12 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "16", @@ -463,7 +460,7 @@ jobs " 15 ++SYSPRINT DD SYSOUT=* ", " ++* " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "47", @@ -517,7 +514,7 @@ jobs " IEF033I JOB/DBDGEN00/STOP 2020073.1250 ", " CPU: 0 HR 00 MIN 00.03 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "44", @@ -572,7 +569,7 @@ jobs " **** END OF MESSAGE SUMMARY REPORT **** ", " " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "L", "record_count": "45", @@ -585,7 +582,6 @@ jobs "job_id": "JOB00361", "job_name": "DBDGEN00", "origin_node": "STL1", - "owner": "OMVSADM", "priority": 1, "program_name": "IEBGENER", "queue_position": 3, @@ -593,14 +589,14 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "DLORD6" - } - ] + "msg_txt": "" }, + "steps": [ + { + "step_cc": 0, + "step_name": "DLORD6" + } + ], "subsystem": "STL1", "svc_class": "?", "system": "STL1" @@ -648,13 +644,13 @@ jobs | **type**: str | **sample**: 00:00:10 - ddnames + dds Data definition names. | **type**: list | **elements**: dict - ddname + dd_name Data definition name. | **type**: str @@ -691,7 +687,7 @@ jobs | **sample**: 574 content - The ddname content. + The dd content. 
| **type**: list | **elements**: str @@ -730,13 +726,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" } } @@ -796,23 +786,35 @@ jobs | **type**: int - steps - Series of JCL steps that were executed and their return codes. - | **type**: list - | **elements**: dict + steps + Series of JCL steps that were executed and their return codes. - step_name - Name of the step shown as "was executed" in the DD section. + | **type**: list + | **elements**: dict + | **sample**: + + .. code-block:: json - | **type**: str - | **sample**: STEP0001 + { + "steps": [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ] + } - step_cc - The CC returned for this step in the DD section. + step_name + Name of the step shown as "was executed" in the DD section. - | **type**: int + | **type**: str + | **sample**: STEP0001 + step_cc + The CC returned for this step in the DD section. + + | **type**: int job_class diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 3ed1a1e339..60fa24005a 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -335,13 +335,31 @@ msg | **type**: str | **sample**: Parameter verification failed -return_content +stdout + The stdout from ZOAU dsed command. + + | **returned**: always + | **type**: str + +stderr The error messages from ZOAU dsed - | **returned**: failure + | **returned**: always | **type**: str | **sample**: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines + List of strings containing individual lines from stdout. + + | **returned**: always + | **type**: list + +stderr_lines + List of strings containing individual lines from stderr. + + | **returned**: always + | **type**: list + backup_name Name of the backup file or data set that was created. 
diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 703795c3da..bd0fd6e7cf 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -63,25 +63,25 @@ fs_type state The desired status of the described mount (choice). - If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. + If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/name* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. - If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. + If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/name* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. - If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/data_store*. The device will be unmounted and the module will complete successfully with *changed=True*. + If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/name*. The device will be unmounted and the module will complete successfully with *changed=True*. 
- If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/data_store*.The device will remain unchanged and the module will complete with *changed=False*. + If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/name*.The device will remain unchanged and the module will complete with *changed=False*. - If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/data_store* if not present. The module will complete successfully with *changed=True*. + If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/name* if not present. The module will complete successfully with *changed=True*. - If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/data_store* if present. The module will complete successfully with *changed=True*. + If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/name* if present. The module will complete successfully with *changed=True*. - If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. + If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/name*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. | **required**: False @@ -91,13 +91,13 @@ state persistent - Add or remove mount command entries to provided *data_store* + Add or remove mount command entries to provided *name* | **required**: False | **type**: dict - data_store + name The data set name used for persisting a mount command. This is usually BPXPRMxx or a copy. 
| **required**: True @@ -105,7 +105,7 @@ persistent backup - Creates a backup file or backup data set for *data_store*, including the timestamp information to ensure that you retrieve the original parameters defined in *data_store*. + Creates a backup file or backup data set for *name*, including the timestamp information to ensure that you retrieve the original parameters defined in *name*. *backup_name* can be used to specify a backup file name if *backup=true*. @@ -119,7 +119,7 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_store* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. + If the source *name* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. If the source is an MVS data set, the backup_name must be an MVS data set name. @@ -131,10 +131,10 @@ persistent | **type**: str - comment - If provided, this is used as a comment that surrounds the command in the *persistent/data_store* + marker + If provided, this is used as a marker that surrounds the command in the *persistent/name* - Comments are used to encapsulate the *persistent/data_store* entry such that they can easily be understood and located. + Comments are used to encapsulate the *persistent/name* entry such that they can easily be understood and located. | **required**: False | **type**: list @@ -334,8 +334,8 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + name: SYS1.PARMLIB(BPXPRMAA) + marker: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. 
zos_mount: @@ -344,10 +344,10 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) + name: SYS1.PARMLIB(BPXPRMAA) backup: true backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + marker: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -454,7 +454,7 @@ persistent | **returned**: always | **type**: dict - data_store + name The persistent store name where the mount was written to. | **returned**: always @@ -479,8 +479,8 @@ persistent | **type**: str | **sample**: SYS1.FILESYS(PRMAABAK) - comment - The text that was used in markers around the *Persistent/data_store* entry. + marker + The text that was used in markers around the *Persistent/name* entry. | **returned**: always | **type**: list @@ -494,6 +494,15 @@ persistent ] ] + state + The state of the persistent entry in the persistent data set. + + Possible values are ``added`` and ``removed``. + + | **returned**: always + | **type**: str + | **sample**: added + unmount_opts Describes how the unmount is to be performed. diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 8710256f74..de7bf58ce8 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -51,20 +51,29 @@ verbose | **default**: False -wait_time_s +wait_time Set maximum time in seconds to wait for the commands to execute. When set to 0, the system default is used. This option is helpful on a busy system requiring more time to execute commands. - Setting *wait* can instruct if execution should wait the full *wait_time_s*. + Setting *wait* can instruct if execution should wait the full *wait_time*. | **required**: False | **type**: int | **default**: 1 +time_unit + Set the ``wait_time`` unit of time, which can be ``s`` (seconds) or ``cs`` (centiseconds). 
+ + | **required**: False + | **type**: str + | **default**: s + | **choices**: s, cs + + case_sensitive If ``true``, the command will not be converted to uppercase before execution. Instead, the casing will be preserved just as it was written in a task. @@ -111,12 +120,18 @@ Examples - name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' - wait_time_s: 5 + wait_time: 5 - name: Display the system symbols and associated substitution texts. zos_operator: cmd: 'D SYMBOLS' + - name: Execute an operator command to show device status and allocation wait 10 centiseconds. + zos_operator: + cmd: 'd u' + wait_time : 10 + time_unit : 'cs' + @@ -150,7 +165,7 @@ cmd | **sample**: d u,all elapsed - The number of seconds that elapsed waiting for the command to complete. + The number of seconds or centiseconds that elapsed waiting for the command to complete. | **returned**: always | **type**: float @@ -160,13 +175,20 @@ elapsed 51.53 -wait_time_s - The maximum time in seconds to wait for the commands to execute. +wait_time + The maximum time in the time_unit set to wait for the commands to execute. | **returned**: always | **type**: int | **sample**: 5 +time_unit + The time unit set for wait_time. + + | **returned**: always + | **type**: str + | **sample**: s + content The resulting text from the command submitted. diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index 350f87266b..b69be4d463 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -37,7 +37,7 @@ system | **type**: str -message_id +msg_id Return outstanding messages requiring operator action awaiting a reply for a particular message identifier. If the message identifier is not specified, all outstanding messages for all message identifiers are returned. 
@@ -59,7 +59,7 @@ job_name | **type**: str -message_filter +msg_filter Return outstanding messages requiring operator action awaiting a reply that match a regular expression (regex) filter. If the message filter is not specified, all outstanding messages are returned regardless of their content. @@ -69,7 +69,7 @@ message_filter filter - Specifies the substring or regex to match to the outstanding messages, see *use_regex*. + Specifies the substring or regex to match to the outstanding messages, see *literal*. All special characters in a filter string that are not a regex are escaped. @@ -81,16 +81,16 @@ message_filter | **type**: str - use_regex + literal Indicates that the value for *filter* is a regex or a string to match. - If False, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. + If False, the module creates a regex from the *filter* string and matches it to the outstanding messages. - If True, the module creates a regex from the *filter* string and matches it to the outstanding messages. + If True, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. | **required**: False | **type**: bool - | **default**: False + | **default**: True @@ -126,11 +126,11 @@ Examples - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: - message_id: dsi* + msg_id: dsi* - name: Display all outstanding messages that have the text IMS READY in them zos_operator_action_query: - message_filter: + msg_filter: filter: IMS READY - name: Display all outstanding messages where the job name begins with 'mq', @@ -138,11 +138,11 @@ Examples pattern 'IMS' zos_operator_action_query: job_name: mq* - message_id: dsi* + msg_id: dsi* system: mv29 - message_filter: + msg_filter: filter: ^.*IMS.*$ - use_regex: true + literal: true @@ -172,14 +172,14 @@ changed count The total number of outstanding messages. 
- | **returned**: on success + | **returned**: always | **type**: int | **sample**: 12 actions The list of the outstanding messages. - | **returned**: success + | **returned**: always | **type**: list | **elements**: dict | **sample**: @@ -190,8 +190,8 @@ actions { "job_id": "STC01537", "job_name": "IM5HCONN", - "message_id": "HWSC0000I", - "message_text": "*399 HWSC0000I *IMS CONNECT READY* IM5HCONN", + "msg_id": "HWSC0000I", + "msg_txt": "*399 HWSC0000I *IMS CONNECT READY* IM5HCONN", "number": "001", "system": "MV27", "type": "R" @@ -199,8 +199,8 @@ actions { "job_id": "STC01533", "job_name": "IM5HCTRL", - "message_id": "DFS3139I", - "message_text": "*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H", + "msg_id": "DFS3139I", + "msg_txt": "*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H", "number": "002", "system": "MV27", "type": "R" @@ -235,8 +235,8 @@ actions | **type**: str | **sample**: STC01537 - message_text - Content of the outstanding message requiring operator action awaiting a reply. If *message_filter* is set, *message_text* will be filtered accordingly. + msg_txt + Content of the outstanding message requiring operator action awaiting a reply. If *msg_filter* is set, *msg_txt* will be filtered accordingly. | **returned**: success | **type**: str @@ -249,7 +249,7 @@ actions | **type**: str | **sample**: IM5HCONN - message_id + msg_id Message identifier for outstanding message requiring operator action awaiting a reply. | **returned**: success diff --git a/docs/source/modules/zos_replace.rst b/docs/source/modules/zos_replace.rst index 70b2adf2f7..ba099b2b20 100644 --- a/docs/source/modules/zos_replace.rst +++ b/docs/source/modules/zos_replace.rst @@ -110,7 +110,7 @@ literal To interpret one option as a literal, use *literal=regexp*, *literal=after* or *literal=before*. 
- To interpret multiple options as a literal, use a list such as ``['after', 'before']`` or ``['regex', 'after', 'before']`` + To interpret multiple options as a literal, use a list such as ``['after', 'before']`` or ``['regex', 'after', 'before']``. | **required**: False | **type**: raw diff --git a/docs/source/modules/zos_script.rst b/docs/source/modules/zos_script.rst index e85fdb14f0..3d5b74e13f 100644 --- a/docs/source/modules/zos_script.rst +++ b/docs/source/modules/zos_script.rst @@ -223,6 +223,7 @@ template_parameters | **default**: \\n | **choices**: \\n, \\r, \\r\\n + auto_reload Whether to reload a template file when it has changed after the task has started. diff --git a/docs/source/modules/zos_started_task.rst b/docs/source/modules/zos_started_task.rst index b4346876fb..bc643911f1 100644 --- a/docs/source/modules/zos_started_task.rst +++ b/docs/source/modules/zos_started_task.rst @@ -44,8 +44,8 @@ armrestart | **type**: bool -asid - When *state* is ``cancelled``, ``stopped`` or ``forced``, *asid* is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. +asidx + When *state* is ``cancelled``, ``stopped`` or ``forced``, *asidx* is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. Only applicable when *state* is ``stopped``, ``cancelled``, or ``forced``, otherwise ignored. @@ -53,24 +53,6 @@ asid | **type**: str -device_type - Type of the output device (if any) associated with the task. - - Only applicable when *state* is ``started``, otherwise ignored. - - | **required**: False - | **type**: str - - -device_number - Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must precede a 4-digit number but not a 3-digit number. - - Only applicable when *state* is ``started``, otherwise ignored. - - | **required**: False - | **type**: str - - dump Whether to perform a dump. 
The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. @@ -133,17 +115,6 @@ parameters | **elements**: str -retry_force - Indicates whether retry will be attempted on ABTER:ref:`abnormal termination `. - - *tcb_address* is mandatory to use *retry_force*. - - Only applicable when *state* is ``forced``, otherwise ignored. - - | **required**: False - | **type**: bool - - reus_asid When *reus_asid* is ``True`` and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If *reus_asid* is not specified or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. @@ -176,19 +147,10 @@ subsystem | **type**: str -tcb_address - 6-digit hexadecimal TCB address of the task to terminate. - - Only applicable when *state* is ``forced``, otherwise ignored. - - | **required**: False - | **type**: str - - -volume - If *device_type* is a tape or direct-access device, the serial number of the volume, mounted on the device. +task_id + The started task id starts with STC. - Only applicable when *state* is ``started``, otherwise ignored. + Only applicable when *state* is ``displayed``, ``modified``, ``cancelled``, ``stopped``, or ``forced``, otherwise ignored. | **required**: False | **type**: str @@ -271,6 +233,10 @@ Examples zos_started_task: state: "displayed" task_name: "PROCAPP" + - name: Display a started task using a started task id. + zos_started_task: + state: "displayed" + task_id: "STC00012" - name: Display all started tasks that begin with an s using a wildcard. zos_started_task: state: "displayed" @@ -283,30 +249,47 @@ Examples zos_started_task: state: "cancelled" task_name: "SAMPLE" + - name: Cancel a started task using a started task id. + zos_started_task: + state: "cancelled" + task_id: "STC00093" - name: Cancel a started task using it's task name and ASID. 
zos_started_task: state: "cancelled" task_name: "SAMPLE" - asid: 0014 + asidx: 0014 - name: Modify a started task's parameters. zos_started_task: state: "modified" task_name: "SAMPLE" parameters: ["XX=12"] + - name: Modify a started task's parameters using a started task id. + zos_started_task: + state: "modified" + task_id: "STC00034" + parameters: ["XX=12"] - name: Stop a started task using it's task name. zos_started_task: state: "stopped" task_name: "SAMPLE" + - name: Stop a started task using a started task id. + zos_started_task: + state: "stopped" + task_id: "STC00087" - name: Stop a started task using it's task name, identifier and ASID. zos_started_task: state: "stopped" task_name: "SAMPLE" identifier: "SAMPLE" - asid: 00A5 + asidx: 00A5 - name: Force a started task using it's task name. zos_started_task: state: "forced" task_name: "SAMPLE" + - name: Force a started task using it's task id. + zos_started_task: + state: "forced" + task_id: "STC00065" @@ -346,8 +329,12 @@ rc The return code is 1 when opercmd throws any error. + The return code is 4 when task_id format is invalid. + The return code is 5 when any parameter validation failed. + The return code is 8 when started task is not found using task_id. + | **returned**: changed | **type**: int @@ -407,21 +394,7 @@ tasks | **type**: list | **elements**: dict - address_space_second_table_entry - The control block used to manage memory for a started task - - | **type**: str - | **sample**: 03E78500 - - affinity - The identifier of the processor, for up to any four processors, if the job requires the services of specific processors. - - affinity=NONE means the job can run on any processor. - - | **type**: str - | **sample**: NONE - - asid + asidx Address space identifier (ASID), in hexadecimal. | **type**: str @@ -435,34 +408,6 @@ tasks | **type**: str | **sample**: 000.008S - dataspaces - The started task data spaces details. 
- - | **returned**: success - | **type**: list - | **elements**: dict - - data_space_address_entry - Central address of the data space ASTE. - - | **type**: str - | **sample**: 058F2180 - - data_space_name - Data space name associated with the address space. - - | **type**: str - | **sample**: CIRRGMAP - - - domain_number - The z/OS system or sysplex domain where started task is running. - - domain_number=N/A if the system is operating in goal mode. - - | **type**: str - | **sample**: N/A - elapsed_time For address spaces other than system address spaces, the elapsed time since job select time. @@ -475,75 +420,17 @@ tasks | **type**: str | **sample**: 812.983S - priority - Priority of a started task, as determined by the Workload Manager (WLM), based on the service class and importance assigned to it. - - | **type**: str - | **sample**: 1 - - proc_step_name - For APPC-initiated transactions, the user ID requesting the transaction. - - The name of a step within a cataloged procedure that was called by the step specified in field sss. - - Blank, if there is no cataloged procedure. - - The identifier of the requesting transaction program. - - | **type**: str - | **sample**: VLF - - program_event_recording - YES if A PER trap is active in the address space. - - NO if No PER trap is active in the address space. - - | **type**: str - - program_name - The name of the program(load module) that created or is running in the started task's address space. - - program_name=N/A if the system is operating in goal mode. - - | **type**: str - | **sample**: N/A - - queue_scan_count - YES if the address space has been quiesced. - - NO if the address space is not quiesced. - - | **type**: str - - resource_group - The name of the resource group currently associated with the service class. It can also be N/A if there is no resource group association. - - | **type**: str - | **sample**: N/A - - server - YES if the address space is a server. - - No if the address space is not a server. 
- - | **type**: str - - started_class_list - The name of the service class currently associated with the address space. - - | **type**: str - | **sample**: SYSSTC - started_time The time when the started task started. | **type**: str | **sample**: 2025-09-11 18:21:50.293644+00:00 - system_management_control - Number of outstanding step-must-complete requests. + task_id + The started task id. | **type**: str + | **sample**: STC00018 task_identifier The name of a system address space. @@ -569,44 +456,6 @@ tasks | **type**: str | **sample**: SAMPLE - task_status - ``IN`` for swapped in. - - ``OUT`` for swapped out, ready to run. - - ``OWT`` for swapped out, waiting, not ready to run. - - ``OU*`` for in process of being swapped out. - - ``IN*`` for in process of being swapped in. - - ``NSW`` for non-swappable. - - | **type**: str - | **sample**: NSW - - task_type - ``S`` for started task. - - ``A`` for an attached APPC transaction program. - - ``I`` for initiator address space. - - ``J`` for job - - ``M`` for mount - - ``*`` for system address space - - | **type**: str - | **sample**: S - - workload_manager - The name of the workload currently associated with the address space. - - | **type**: str - | **sample**: SYSTEM - verbose_output If ``verbose=true``, the system logs related to the started task executed state will be shown. diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index eec87c3eca..44e4a7784b 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -52,7 +52,7 @@ format | **type**: dict - name + type The compression format used while archiving. | **required**: True @@ -60,7 +60,7 @@ format | **choices**: bz2, gz, tar, zip, terse, xmit, pax - format_options + options Options specific to a compression format. 
| **required**: False @@ -80,7 +80,7 @@ format | **type**: str - use_adrdssu + adrdssu If set to true, the ``zos_unarchive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to uncompress data sets from a portable format after using ``xmit`` or ``terse``. | **required**: False @@ -89,7 +89,7 @@ format dest_volumes - When *use_adrdssu=True*, specify the volume the data sets will be written to. + When *adrdssu=True*, specify the volume the data sets will be written to. If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. @@ -424,14 +424,14 @@ Examples zos_unarchive: src: "./files/archive_folder_test.tar" format: - name: tar + type: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: src: "/tmp/test.bz2" format: - name: bz2 + type: bz2 include: - 'foo.txt' @@ -440,7 +440,7 @@ Examples zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse exclude: - USER.ARCHIVE.TEST1 - USER.ARCHIVE.TEST2 @@ -450,16 +450,16 @@ Examples zos_unarchive: src: "USER.ARCHIVE(0)" format: - name: terse + type: terse # List option - name: List content from XMIT zos_unarchive: src: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit - format_options: - use_adrdssu: true + type: xmit + options: + adrdssu: true list: true # Encoding example @@ -467,7 +467,7 @@ Examples zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -476,7 +476,7 @@ Examples zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 From 39d72e07ca545ec17ad05c21e75cc4427d727a70 Mon Sep 17 00:00:00 2001 From: surendrababuravella <39149274+surendrababuravella@users.noreply.github.com> Date: Fri, 10 Oct 2025 19:05:36 +0530 Subject: [PATCH 72/73] Adding enhancements Updated logic related to handling duplicate 
tasks Updated format of cpu_time and elapsed_time --- plugins/modules/zos_started_task.py | 158 ++++++++++++------ .../modules/test_zos_started_task_func.py | 36 ++-- 2 files changed, 127 insertions(+), 67 deletions(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index 68d8034e6d..f5b657c1c1 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -377,30 +377,24 @@ cpu_time: description: - The processor time used by the address space, including the initiator. This time does not include SRB time. - - cpu_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. - sss.tttS when time is less than 1000 seconds - hh.mm.ss when time is at least 1000 seconds, but less than 100 hours - hhhhh.mm when time is at least 100 hours - ******** when time exceeds 100000 hours - NOTAVAIL when the TOD clock is not working + - I(cpu_time) format is hhhhh.mm.ss.SSS(hours.minutes.seconds.milliseconds). + - C(********) when time exceeds 100000 hours. + - C(NOTAVAIL) when the TOD clock is not working. type: str - sample: 000.008S + sample: 00000.00.00.003 elapsed_time: description: - - For address spaces other than system address spaces, the elapsed time since job select time. - - For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. - - For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. - - elapsed_time has one of following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. 
- sss.tttS when time is less than 1000 seconds
- hh.mm.ss when time is at least 1000 seconds, but less than 100 hours
- hhhhh.mm when time is at least 100 hours
- ******** when time exceeds 100000 hours
- NOTAVAIL when the TOD clock is not working
+ - The elapsed time since job select time for the started task's address space. This is wall-clock time, not processor time.
+ - I(elapsed_time) format is hhhhh.mm.ss.SSS(hours.minutes.seconds.milliseconds).
+ - C(********) when time exceeds 100000 hours.
+ - C(NOTAVAIL) when the TOD clock is not working.
type: str
- sample: 812.983S
+ sample: 00003.20.23.013
started_time:
description:
- The time when the started task started.
+ - C(********) when time exceeds 100000 hours.
+ - C(NOTAVAIL) when the TOD clock is not working.
type: str
sample: "2025-09-11 18:21:50.293644+00:00"
task_id:
@@ -489,7 +483,7 @@ def execute_command(operator_cmd, started_task_name, asidx, execute_display_befo
return rc, stdout, stderr, task_params
-def execute_display_command(started_task_name, asidx=None, before_time=None, timeout=0):
+def execute_display_command(started_task_name, asidx=None, task_params_before=None, timeout=0):
"""Execute operator display command.
Parameters
----------
started_task_name : string
Name of the started task.
asidx : string
The HEX adress space identifier.
- before_time: datetime
- The timestamp when operation started.
+ task_params_before: list
+ List of started task details which have the same started task name.
timeout : int
Timeout to wait for the command execution, measured in centiseconds.
@@ -512,7 +506,7 @@ def execute_display_command(started_task_name, asidx=None, before_time=None, tim display_response = opercmd.execute(cmd, timeout) task_params = [] if display_response.rc == 0 and display_response.stderr_response == "": - task_params = extract_keys(display_response.stdout_response, asidx, before_time) + task_params = extract_keys(display_response.stdout_response, asidx, task_params_before) return task_params @@ -587,11 +581,13 @@ def validate_and_prepare_start_command(module): else: keyword_parameters_string = f"{key}={value}" if job_name: - started_task_name = job_name + started_task_name = f"{job_name}.{job_name}" elif member: started_task_name = member if identifier: started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" else: module.fail_json( rc=5, @@ -685,6 +681,8 @@ def prepare_display_command(module): ------- started_task_name The name of started task. + asidx + The address space identifier value, in hexadecimal. cmd The display command in string format. """ @@ -721,7 +719,7 @@ def prepare_stop_command(module, started_task=None, asidx=None, duplicate_tasks= started_task: string The started task name. asidx : string - The HEX adress space identifier. + The address space identifier value, in hexadecimal. duplicate_tasks: bool Indicates if duplicate tasks are running. 
@@ -742,6 +740,8 @@ def prepare_stop_command(module, started_task=None, asidx=None, duplicate_tasks= started_task_name = job_name if identifier: started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" else: module.fail_json( rc=5, @@ -781,6 +781,8 @@ def prepare_modify_command(module, started_task=None): started_task_name = job_name if identifier: started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" else: module.fail_json( rc=5, @@ -807,7 +809,7 @@ def prepare_cancel_command(module, started_task=None, asidx=None, duplicate_task started_task: string The started task name. asidx : string - The HEX adress space identifier. + The address space identifier value, in hexadecimal. duplicate_tasks: bool Indicates if duplicate tasks are running. @@ -831,6 +833,8 @@ def prepare_cancel_command(module, started_task=None, asidx=None, duplicate_task started_task_name = job_name if identifier: started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" elif userid: started_task_name = f"U={userid}" else: @@ -865,7 +869,7 @@ def prepare_force_command(module, started_task=None, asidx=None, duplicate_tasks started_task: string The started task name. asidx : string - The HEX adress space identifier. + The address space identifier value, in hexadecimal. duplicate_tasks: bool Indicates if duplicate tasks are running. 
@@ -908,6 +912,8 @@ def prepare_force_command(module, started_task=None, asidx=None, duplicate_tasks started_task_name = job_name if identifier: started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" elif userid: started_task_name = f"U={userid}" else: @@ -930,7 +936,7 @@ def prepare_force_command(module, started_task=None, asidx=None, duplicate_tasks return started_task_name, cmd -def extract_keys(stdout, asidx=None, before_time=None): +def extract_keys(stdout, asidx=None, task_params_before=None): """Extracts keys and values from the given stdout Parameters @@ -938,9 +944,9 @@ def extract_keys(stdout, asidx=None, before_time=None): stdout : string The started task display command output asidx : string - The HEX adress space identifier. - before_time: datetime - The timestamp when operation started. + The address space identifier value, in hexadecimal. + task_params_before: list + List of started task details which have same started task name. 
Returns ------- @@ -956,24 +962,31 @@ def extract_keys(stdout, asidx=None, before_time=None): lines = stdout.strip().split('\n') tasks = [] current_task = {} - aste_key = "ADDR SPACE ASTE" task_header_regex = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)') - kv_pattern = re.compile(rf'({re.escape(aste_key)}|\S+)=(\S+)') + kv_pattern = re.compile(r'(\S+)=(\S+)') for line in lines[5:]: line = line.strip() match_firstline = task_header_regex.search(line) if len(line.split()) >= 5 and match_firstline: if current_task: + current_task['started_time'] = "" el_time = current_task.get('elapsed_time') if el_time: + current_task['elapsed_time'] = convert_cpu_time(el_time) or current_task['elapsed_time'] current_task['started_time'] = calculate_start_time(el_time) if asidx: if asidx == current_task.get('asidx'): tasks.append(current_task) current_task = {} break - elif before_time: - if before_time < datetime.fromisoformat(current_task.get('started_time')): + elif task_params_before: + current_asid = current_task.get('asidx') + task_exists_before = False + for task in task_params_before: + if task.get('asidx') == current_asid: + task_exists_before = True + break + if not task_exists_before: tasks.append(current_task) else: tasks.append(current_task) @@ -992,14 +1005,25 @@ def extract_keys(stdout, asidx=None, before_time=None): key = keys[key] current_task[key.lower()] = value if current_task: + current_task['started_time'] = "" el_time = current_task.get('elapsed_time') if el_time: + current_task['elapsed_time'] = convert_cpu_time(el_time) or current_task['elapsed_time'] current_task['started_time'] = calculate_start_time(el_time) + cpu_time = current_task.get('cpu_time') + if cpu_time: + current_task['cpu_time'] = convert_cpu_time(cpu_time) or current_task['cpu_time'] if asidx: if asidx == current_task.get('asidx'): tasks.append(current_task) - elif before_time: - if before_time < datetime.fromisoformat(current_task.get('started_time')): + elif task_params_before: + 
current_asid = current_task.get('asidx') + task_exists_before = False + for task in task_params_before: + if task.get('asidx') == current_asid: + task_exists_before = True + break + if not task_exists_before: tasks.append(current_task) else: tasks.append(current_task) @@ -1019,20 +1043,24 @@ def parse_time(ts_str): timestamp Transformed timestamp """ - # Case 1: Duration like "000.005seconds" - sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) - if sec_match: - return timedelta(seconds=float(sec_match.group(1))) - # Case 2: hh.mm.ss - hms_match = re.match(r"^(\d+).(\d{2}).(\d{2})$", ts_str) - if hms_match: - h, m, s = map(int, hms_match.groups()) - return timedelta(hours=h, minutes=m, seconds=s) - # Case 3: hhhhh.mm - hm_match = re.match(r"^(\d{1,5}).(\d{2})$", ts_str) - if hm_match: - h, m = map(int, hm_match.groups()) - return timedelta(hours=h, minutes=m) + try: + # Case 1: Duration like "000.005seconds" + sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) + if sec_match: + return timedelta(seconds=float(sec_match.group(1))) + # Case 2: hh.mm.ss + hms_match = re.match(r"^(\d+).(\d{2}).(\d{2})$", ts_str) + if hms_match: + h, m, s = map(int, hms_match.groups()) + return timedelta(hours=h, minutes=m, seconds=s) + # Case 3: hhhhh.mm + hm_match = re.match(r"^(\d{1,5}).(\d{2})$", ts_str) + if hm_match: + h, m = map(int, hm_match.groups()) + return timedelta(hours=h, minutes=m) + except Exception: + pass + return "" def calculate_start_time(ts_str): @@ -1043,6 +1071,25 @@ def calculate_start_time(ts_str): # If it's a timedelta (duration), subtract from now → absolute datetime if isinstance(parsed, timedelta): return f"{now - parsed}" + return "" + + +def convert_cpu_time(ts_str): + parsed = parse_time(ts_str) + if parsed is None: + return "" + # If it's a timedelta (duration), subtract from now → absolute datetime + if isinstance(parsed, timedelta): + total_seconds = int(parsed.total_seconds()) + milliseconds = int(parsed.microseconds / 
1000) + + hours = total_seconds // 3600 + minutes = (total_seconds % 3600) // 60 + seconds = total_seconds % 60 + + # Format: HHHHH.MM.SS.SSS + return f"{hours:05}.{minutes:02}.{seconds:02}.{milliseconds:03}" + return "" def fetch_logs(command, timeout): @@ -1314,7 +1361,7 @@ def run_module(): duplicate_tasks = False started_task_name_from_id = "" task_info = [] - if task_id and state != "displayed": + if task_id and state != "displayed" and state != "started": task_name, asidx = fetch_task_name_and_asidx(module, task_id) task_params = execute_display_command(task_name) if len(task_params) > 1: @@ -1344,7 +1391,7 @@ def run_module(): start_errmsg = ['IEE122I', 'IEE535I', 'IEE307I', 'ERROR', 'IEE708I'] stop_errmsg = ['IEE341I', 'IEE535I', 'IEE708I'] display_errmsg = ['IEE341I', 'IEE535I', 'NOT FOUND', 'IEE708I'] - modify_errmsg = ['REJECTED', 'IEE341I', 'IEE535I', 'IEE311I', 'IEE708I'] + modify_errmsg = ['REJECTED', 'IEE341I', 'IEE535I', 'IEE311I', 'IEE708I', 'ISF302E'] cancel_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'IEE842I', 'NON-CANCELABLE', 'IEE708I'] force_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'CANCELABLE', 'IEE842I', 'IEE708I'] error_details = { @@ -1360,7 +1407,8 @@ def run_module(): 'NON-CANCELABLE': 'The task cannot be canceled. Use the FORCE ARM command.', 'CANCELABLE': 'The task can be canceled. Use the CANCEL command.', 'IEE311I': 'Required parameter is missing.', - 'IEE708I': 'The value of a keyword specified on a command is incorrect.' + 'IEE708I': 'The value of a keyword specified on a command is incorrect.', + 'ISF302E': 'Parameters are invalid.' 
} err_msg = [] kwargs = {} @@ -1369,14 +1417,14 @@ def run_module(): kwargs.update({"wait": True}) cmd = "" - before_time = None + task_params_before = [] execute_display_before = False execute_display_after = False if state == "started": - before_time = datetime.now().astimezone() err_msg = start_errmsg execute_display_after = True started_task_name, cmd = validate_and_prepare_start_command(module) + task_params_before = execute_display_command(started_task_name) elif state == "displayed": err_msg = display_errmsg started_task_name, asidx, cmd = prepare_display_command(module) @@ -1441,7 +1489,7 @@ def run_module(): if state == "displayed": task_params = extract_keys(out, asidx) elif execute_display_after: - task_params = execute_display_command(started_task_name, asidx, before_time) + task_params = execute_display_command(started_task_name, asidx, task_params_before) result = dict() diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py index f088db9dfe..2492f3b178 100644 --- a/tests/functional/modules/test_zos_started_task_func.py +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -135,7 +135,7 @@ def test_start_task_with_invalid_jobaccount(ansible_zos_module): # for result in start_results.contacted.values(): # assert result.get("changed") is False # assert result.get("stderr") is not None -# assert result.get("cmd") == "S SAMPLE,,12345A" +# assert result.get("cmd") == "S SAMPLE.SAMPLE,,12345A" # assert result.get("msg") is not None def test_start_task_with_invalid_parameters(ansible_zos_module): @@ -241,7 +241,7 @@ def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): # for result in start_results.contacted.values(): # assert result.get("changed") is False # assert result.get("stderr") is not None -# assert result.get("cmd") == 'S SAMPLE,/ABCD' +# assert result.get("cmd") == 'S SAMPLE.SAMPLE,/ABCD' # assert result.get("msg") is not None # assert 
result.get("verbose_output") == "" @@ -471,7 +471,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): assert result.get("changed") is False assert result.get("stderr") is not None assert len(result.get("tasks")) > 0 - assert result.get("cmd") == "FORCE SAMPLE" + assert result.get("cmd") == "FORCE SAMPLE.SAMPLE" assert result.get("msg") is not None assert "CANCELABLE - ISSUE CANCEL BEFORE FORCE" in result.get("stderr") @@ -509,7 +509,7 @@ def test_start_and_cancel_zos_started_task(ansible_zos_module): for result in stop_results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None - assert len(result.get("tasks")) > 0 + assert len(result.get("tasks")) == 0 assert result.get("verbose_output") == "" stop_results = hosts.all.zos_started_task( @@ -662,7 +662,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): assert result.get("rc") == 0 assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" - assert result.get("cmd") == "F VLF,REPLACE,NN=00" + assert result.get("cmd") == "F VLF.VLF,REPLACE,NN=00" display_result = hosts.all.zos_started_task( state = "displayed", @@ -688,7 +688,7 @@ def test_stop_and_modify_with_vlf_task(ansible_zos_module): assert result.get("rc") == 0 assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" - assert result.get("cmd") == f"P VLF,A={asid_val}" + assert result.get("cmd") == f"P VLF.VLF,A={asid_val}" start_results = hosts.all.zos_started_task( state = "started", @@ -952,7 +952,7 @@ def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): ) hosts.all.shell( - cmd="dcp {0} \"//'{1}(MSLEEP)'\"".format(data_set_name, PROC_PDS) + cmd="dcp {0} \"//'{1}(TSLEEP)'\"".format(data_set_name, PROC_PDS) ) display_results = hosts.all.zos_started_task( state = "displayed", @@ -965,7 +965,7 @@ def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): start_results = hosts.all.zos_started_task( state = "started", - 
member = "MSLEEP", + member = "TSLEEP", keyword_parameters = {"SECS": "60"}, verbose = True ) @@ -978,7 +978,7 @@ def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): start_results = hosts.all.zos_started_task( state = "started", - member = "MSLEEP", + member = "TSLEEP", keyword_parameters = {"SECS": "80"}, verbose = True ) @@ -991,7 +991,7 @@ def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): display_results = hosts.all.zos_started_task( state = "displayed", - task = "MSLEEP" + task = "TSLEEP" ) for result in display_results.contacted.values(): @@ -1013,7 +1013,7 @@ def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): assert result.get("stderr") == "" display_results = hosts.all.zos_started_task( state = "displayed", - task = "MSLEEP" + task = "TSLEEP" ) for result in display_results.contacted.values(): @@ -1027,7 +1027,7 @@ def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): cmd="drm {0}".format(data_set_name) ) hosts.all.shell( - cmd="mrm '{0}(MSLEEP)'".format(PROC_PDS) + cmd="mrm '{0}(TSLEEP)'".format(PROC_PDS) ) def test_stop_and_force_with_ICSF_task_using_task_id(ansible_zos_module): @@ -1066,6 +1066,18 @@ def test_stop_and_force_with_ICSF_task_using_task_id(ansible_zos_module): assert result.get("rc") == 0 assert len(result.get("tasks")) > 0 assert result.get("stderr") == "" + modify_results = hosts.all.zos_started_task( + state = "modified", + task_id = task_id, + parameters = ["REFRESH"] + ) + for result in modify_results.contacted.values(): + task_id = result.get('tasks')[0]['task_id'] + asid_val = result.get('tasks')[0]['asidx'] + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" force_results = hosts.all.zos_started_task( state = "forced", task_id = task_id, From c109f532eee8f33b3f574f858b76881f9607a97b Mon Sep 17 00:00:00 2001 From: surendrababuravella 
<39149274+surendrababuravella@users.noreply.github.com> Date: Fri, 10 Oct 2025 19:19:14 +0530 Subject: [PATCH 73/73] Update zos_started_task.py --- plugins/modules/zos_started_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py index f5b657c1c1..295cdefa94 100644 --- a/plugins/modules/zos_started_task.py +++ b/plugins/modules/zos_started_task.py @@ -1059,7 +1059,7 @@ def parse_time(ts_str): h, m = map(int, hm_match.groups()) return timedelta(hours=h, minutes=m) except Exception: - pass + return "" return ""