diff --git a/.secrets.baseline b/.secrets.baseline new file mode 100644 index 0000000000..3486173fbe --- /dev/null +++ b/.secrets.baseline @@ -0,0 +1,85 @@ +{ + "exclude": { + "files": "zos_mvs_raw.rst|^.secrets.baseline$", + "lines": null + }, + "generated_at": "2025-09-01T16:59:39Z", + "plugins_used": [ + { + "name": "AWSKeyDetector" + }, + { + "name": "ArtifactoryDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "base64_limit": 4.5, + "name": "Base64HighEntropyString" + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "BoxDetector" + }, + { + "name": "CloudantDetector" + }, + { + "ghe_instance": "github.ibm.com", + "name": "GheDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "hex_limit": 3, + "name": "HexHighEntropyString" + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "keyword_exclude": null, + "name": "KeywordDetector" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "results": {}, + "version": "0.13.1+ibm.62.dss", + "word_list": { + "file": null, + "hash": null + } +} diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9b39d58dce..eee3829d9a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,57 +4,8 @@ ibm.ibm\_zos\_core Release Notes .. contents:: Topics -v1.15.0 -======= - -Release Summary ---------------- - -Release Date: '2025-09-30' -This changelog describes all changes made to the modules and plugins included -in this collection. The release date is the date the changelog is created. 
-For additional details such as required dependencies and availability review -the collections `release notes `__ - -Minor Changes -------------- - -- zos_archive - Adds support for encoding before archiving files. (https://github.com/ansible-collections/ibm_zos_core/pull/2081) -- zos_archive - Adds support for reverting the encoding of a source's files after archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2192) -- zos_archive - Adds support for skipping encoding in archive module. This allows users to skip encoding for certain files before archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2116) -- zos_copy - Added support for british pound character usage in file content and data set names for both source and destination when copying. (https://github.com/ansible-collections/ibm_zos_core/pull/2153) -- zos_copy - Adds new option `identical_gdg_copy` in the module. This allows copying GDG generations from a source base to a destination base while preserving generation data set absolute names when the destination base does not exist prior to the copy. (https://github.com/ansible-collections/ibm_zos_core/pull/2100). -- zos_copy - Adds support of using alias names in src and dest parameters for PS, PDS and PDSE data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/2103) -- zos_fetch - Updated the documentation to correctly state what the default behavior of the module is. (https://github.com/ansible-collections/ibm_zos_core/pull/2047). -- zos_find - Adds functionality to find migrated data sets. - Adds functionality to find different types of data sets at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/2073). -- zos_job_output - Adds new fields cpu_time, origin_node and execution_node to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). -- zos_job_query - Adds new fields cpu_time, origin_node and execution_node to response. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2056). -- zos_job_submit - Adds new fields cpu_time, origin_node and execution_node to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). -- zos_mvs_raw - Before this addition, you could not put anything in columns 1 or 2, were reserved for JCL processing. Change now allows add reserved_cols option and validate that the module get access to modify dd_content option base on the value, if not retain the previous behavior or work. (https://github.com/ansible-collections/ibm_zos_core/pull/2086) -- zos_mvs_raw - Adds support for volume data definition. (https://github.com/ansible-collections/ibm_zos_core/pull/2194) -- zos_stat - Added support to recall migrated data sets and return its attributes. (https://github.com/ansible-collections/ibm_zos_core/pull/2075) -- zos_stat - Adds new fields that describe the type of the resource that was queried. These new fields are `isfile`, `isdataset`, `isaggregate` and `isgdg`. (https://github.com/ansible-collections/ibm_zos_core/pull/2137) -- zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2061) -- zos_stat - Module now returns whether the resource queried exists on the managed node with the `exists` field inside `stat`. (https://github.com/ansible-collections/ibm_zos_core/pull/2137) -- zos_unarchive - Added encoding support for the unarchive module. This allows users to encode the files after unarchiving them in a perticular encoding. (https://github.com/ansible-collections/ibm_zos_core/pull/2105) - -Bugfixes --------- - -- zos_backup_restore - Return value `backup_name` was empty upon successful result. Fix now returns `backup_name` populated. (https://github.com/ansible-collections/ibm_zos_core/pull/2040). -- zos_data_set - Attempting to create a data set with the same name on a different volume did not work, nor did it report a failure. 
The fix now informs the user that if the data set is cataloged on a different volume, it needs to be uncataloged before using the data set module to create a new data set on a different volume. (https://github.com/ansible-collections/ibm_zos_core/pull/2057). -- zos_fetch - Previously, the use of `become` would result in a permissions error while trying to fetch a data set or a member. Fix now allows a user to escalate privileges when fetching resources. (https://github.com/ansible-collections/ibm_zos_core/pull/2079) -- zos_lineinfile - Return values ``return_content`` and ``backup_name`` were not always being returned. Fix now ensure that these values are always present in the module's response. (https://github.com/ansible-collections/ibm_zos_core/pull/2120) -- zos_lineinfile - The module would report a false negative when certain special characters where present in the `line` option. Fix now reports the successful operation. (https://github.com/ansible-collections/ibm_zos_core/pull/2080). -- zos_mount - FSUMF168 return in stderror means that the mount dataset wouldn't resolve. While this shows a catalog or volume issue, it should not impact our search for an existing mount. Added handling to the df call, so that FSUMF168 are ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/2060). - -New Modules ------------ - -- ibm.ibm_zos_core.zos_replace - Replace all instances of a pattern within a file or data set. 
- -v1.14.1 -======= +v1.15.0-beta.1 +============== Release Summary --------------- diff --git a/branch_protection_rules.json b/branch_protection_rules.json new file mode 100644 index 0000000000..ec67bed7dd --- /dev/null +++ b/branch_protection_rules.json @@ -0,0 +1,12 @@ +[{ + "type": "branch-protection", + "name": "code-review", + "params": { + "checks": [ + "tekton/code-branch-protection", + "tekton/code-unit-tests", + "tekton/code-vulnerability-scan", + "tekton/code-detect-secrets" + ] + } +}] \ No newline at end of file diff --git a/changelogs/.plugin-cache.yaml b/changelogs/.plugin-cache.yaml index 817e173dae..2bbc65d7c1 100644 --- a/changelogs/.plugin-cache.yaml +++ b/changelogs/.plugin-cache.yaml @@ -18,133 +18,134 @@ plugins: zos_apf: description: Add or remove libraries to Authorized Program Facility (APF) name: zos_apf - namespace: '' + namespace: "" version_added: 1.3.0 zos_archive: description: Archive files and data sets on z/OS. name: zos_archive - namespace: '' + namespace: "" version_added: 1.7.0 zos_backup_restore: description: Backup and restore data sets and volumes name: zos_backup_restore - namespace: '' + namespace: "" version_added: 1.3.0 zos_blockinfile: description: Manage block of multi-line textual data on z/OS name: zos_blockinfile - namespace: '' + namespace: "" version_added: 1.3.0 zos_copy: description: Copy data to z/OS name: zos_copy - namespace: '' + namespace: "" version_added: 1.2.0 zos_data_set: description: Manage data sets name: zos_data_set - namespace: '' + namespace: "" version_added: 1.3.0 zos_encode: description: Perform encoding operations. name: zos_encode - namespace: '' + namespace: "" version_added: 1.1.0 zos_fetch: description: Fetch data from z/OS name: zos_fetch - namespace: '' + namespace: "" version_added: 1.1.0 zos_find: description: Find matching data sets name: zos_find - namespace: '' + namespace: "" version_added: 1.3.0 zos_gather_facts: description: Gather z/OS system facts. 
name: zos_gather_facts - namespace: '' + namespace: "" version_added: 1.5.0 zos_job_output: description: Display job output name: zos_job_output - namespace: '' + namespace: "" version_added: 1.0.0 zos_job_query: description: Query job status name: zos_job_query - namespace: '' + namespace: "" version_added: 1.0.0 zos_job_submit: description: Submit JCL name: zos_job_submit - namespace: '' + namespace: "" version_added: 1.0.0 zos_lineinfile: description: Manage textual data on z/OS name: zos_lineinfile - namespace: '' + namespace: "" version_added: 1.2.0 zos_mount: description: Mount a z/OS file system. name: zos_mount - namespace: '' + namespace: "" version_added: 1.4.0 zos_mvs_raw: description: Run a z/OS program. name: zos_mvs_raw - namespace: '' + namespace: "" version_added: 1.1.0 zos_operator: description: Execute operator command name: zos_operator - namespace: '' + namespace: "" version_added: 1.1.0 zos_operator_action_query: description: Display messages requiring action name: zos_operator_action_query - namespace: '' + namespace: "" version_added: 1.1.0 zos_ping: description: Ping z/OS and check dependencies. name: zos_ping - namespace: '' + namespace: "" version_added: 1.1.0 zos_replace: description: Replace all instances of a pattern within a file or data set. name: zos_replace - namespace: '' + namespace: "" version_added: 1.15.0 zos_script: description: Run scripts in z/OS name: zos_script - namespace: '' + namespace: "" version_added: 1.8.0 zos_stat: - description: Retrieve facts from MVS data sets, USS files, aggregates and generation + description: + Retrieve facts from MVS data sets, USS files, aggregates and generation data groups name: zos_stat - namespace: '' + namespace: "" version_added: 1.14.0 zos_tso_command: description: Execute TSO commands name: zos_tso_command - namespace: '' + namespace: "" version_added: 1.1.0 zos_unarchive: description: Unarchive files and data sets in z/OS. 
name: zos_unarchive - namespace: '' + namespace: "" version_added: 1.7.0 zos_volume_init: description: Initialize volumes or minidisks. name: zos_volume_init - namespace: '' + namespace: "" version_added: 1.6.0 zos_zfs_resize: description: Resize a zfs data set. name: zos_zfs_resize - namespace: '' + namespace: "" version_added: 1.13.0 netconf: {} shell: {} diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 53174cda4a..07cf77545f 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -3,9 +3,9 @@ releases: 1.0.0: changes: minor_changes: - - Documentation updates - - Module zos_data_set catalog support added - release_summary: 'Release Date: ''2020-18-03'' + - Documentation updates + - Module zos_data_set catalog support added + release_summary: "Release Date: '2020-18-03' This changlelog describes all changes made to the modules and plugins included @@ -13,35 +13,35 @@ releases: For additional details such as required dependencies and availablity review - the collections `release notes `__ ' + the collections `release notes `__ " security_fixes: - - Improved test, security and injection coverage - - Security vulnerabilities fixed + - Improved test, security and injection coverage + - Security vulnerabilities fixed fragments: - - v1.0.0_summary.yml - - v1.0.0_summary_minor.yml - - v1.0.0_summary_security.yml + - v1.0.0_summary.yml + - v1.0.0_summary_minor.yml + - v1.0.0_summary_security.yml modules: - - description: Copy data to z/OS - name: zos_copy - namespace: '' - - description: Display job output - name: zos_job_output - namespace: '' - - description: Query job status - name: zos_job_query - namespace: '' - - description: Submit JCL - name: zos_job_submit - namespace: '' - release_date: '2022-06-07' + - description: Copy data to z/OS + name: zos_copy + namespace: "" + - description: Display job output + name: zos_job_output + namespace: "" + - description: Query job status + name: zos_job_query + namespace: "" + - 
description: Submit JCL + name: zos_job_submit + namespace: "" + release_date: "2022-06-07" 1.1.0: changes: minor_changes: - - Documentation updates - - Improved error handling and messages - - New Filter that will filter a list of WTOR messages based on message text. - release_summary: 'Release Date: ''2020-26-01'' + - Documentation updates + - Improved error handling and messages + - New Filter that will filter a list of WTOR messages based on message text. + release_summary: "Release Date: '2020-26-01' This changlelog describes all changes made to the modules and plugins included @@ -51,36 +51,36 @@ releases: the collections `release notes `__ - ' + " fragments: - - v1.1.0_summary.yml - - v1.1.0_summary_minor.yml + - v1.1.0_summary.yml + - v1.1.0_summary_minor.yml modules: - - description: Perform encoding operations. - name: zos_encode - namespace: '' - - description: Fetch data from z/OS - name: zos_fetch - namespace: '' - - description: Run a z/OS program. - name: zos_mvs_raw - namespace: '' - - description: Execute operator command - name: zos_operator - namespace: '' - - description: Display messages requiring action - name: zos_operator_action_query - namespace: '' - - description: Ping z/OS and check dependencies. - name: zos_ping - namespace: '' - - description: Execute TSO commands - name: zos_tso_command - namespace: '' - release_date: '2022-06-07' + - description: Perform encoding operations. + name: zos_encode + namespace: "" + - description: Fetch data from z/OS + name: zos_fetch + namespace: "" + - description: Run a z/OS program. + name: zos_mvs_raw + namespace: "" + - description: Execute operator command + name: zos_operator + namespace: "" + - description: Display messages requiring action + name: zos_operator_action_query + namespace: "" + - description: Ping z/OS and check dependencies. 
+ name: zos_ping + namespace: "" + - description: Execute TSO commands + name: zos_tso_command + namespace: "" + release_date: "2022-06-07" 1.10.0: changes: - release_summary: 'Release Date: ''2024-06-11'' + release_summary: "Release Date: '2024-06-11' This changelog describes all changes made to the modules and plugins included @@ -88,97 +88,97 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - v1.10.0_summary.yml - release_date: '2024-06-11' + - v1.10.0_summary.yml + release_date: "2024-06-11" 1.10.0-beta.1: changes: breaking_changes: - - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase - choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase - choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``record_format`` no longer accepts uppercase choices, - users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``space_type`` no longer accepts uppercase choices, - users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - option ``type`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_data_set - options inside ``batch`` no longer accept uppercase choices, - users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_job_submit - option ``location`` no longer accepts uppercase choices, - users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``automove`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``fs_type`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users - should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, - users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase - choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` - of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. - This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer - accepts uppercase choices, users should replace them with lowercase ones. - (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
- - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). - - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts - uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - option ``terse_pack`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_archive - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_backup_restore - option ``space_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``record_format`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_copy - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_copy - suboption ``type`` of ``dest_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``record_format`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``space_type`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - option ``type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_data_set - options inside ``batch`` no longer accept uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_job_submit - option ``location`` no longer accepts uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``automove`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``fs_type`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``mount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mount - option ``tag_untagged`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_mount - option ``unmount_opts`` no longer accepts uppercase choices, users + should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - options inside ``dd_concat`` no longer accept uppercase choices, + users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``record_format`` of ``dd_unix`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``space_type`` of ``dd_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboption ``type`` of ``dd_data_set`` no longer accepts uppercase + choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_mvs_raw - suboptions ``disposition_normal`` and ``disposition_abnormal`` + of ``dd_data_set`` no longer accept ``catlg`` and ``uncatlg`` as choices. + This also applies when defining a ``dd_data_set`` inside ``dd_concat``. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``record_format`` of ``dest_data_set`` no longer + accepts uppercase choices, users should replace them with lowercase ones. + (https://github.com/ansible-collections/ibm_zos_core/pull/1388). + - zos_unarchive - suboption ``space_type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). 
+ - zos_unarchive - suboption ``type`` of ``dest_data_set`` no longer accepts + uppercase choices, users should replace them with lowercase ones. (https://github.com/ansible-collections/ibm_zos_core/pull/1388). bugfixes: - - module_utils/job.py - job output containing non-printable characters would - crash modules. Fix now handles the error gracefully and returns a message - to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261). - - zos_apf - List option only returned one data set. Fix now returns the list - of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - - zos_blockinfile - Using double quotation marks inside a block resulted in - a false positive result with ZOAU 1.3. Fix now handles this special case to - avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340). - - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets - the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443). - - zos_job_submit - Was ignoring the default value for location=DATA_SET, now - when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220). - - zos_job_submit - when the argument max_rc was different than 0 the changed - response returned as false. Fix now return a changed response as true when - the rc is not 0 and max_rc is above or equal to the value of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345). - - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating - temporary data sets. Fix now honors the value if provided and uses it as High - Level Qualifier for temporary data sets created during the module execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1320). + - module_utils/job.py - job output containing non-printable characters would + crash modules. 
Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1261). + - zos_apf - List option only returned one data set. Fix now returns the list + of retrieved data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + - zos_blockinfile - Using double quotation marks inside a block resulted in + a false positive result with ZOAU 1.3. Fix now handles this special case to + avoid false negatives. (https://github.com/ansible-collections/ibm_zos_core/pull/1340). + - zos_find - Filter size failed if a PDS/E matched the pattern. Fix now gets + the correct size for PDS/Es. (https://github.com/ansible-collections/ibm_zos_core/pull/1443). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1220). + - zos_job_submit - when the argument max_rc was different than 0 the changed + response returned as false. Fix now return a changed response as true when + the rc is not 0 and max_rc is above or equal to the value of the job. (https://github.com/ansible-collections/ibm_zos_core/pull/1345). + - zos_mvs_raw - The module ignored the value of `tmp_hlq` option when creating + temporary data sets. Fix now honors the value if provided and uses it as High + Level Qualifier for temporary data sets created during the module execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1320). minor_changes: - - zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). - - zos_backup_restore - Add tmp_hlq option to the user interface to override - the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265). - - zos_copy - Documented `group` and `owner` options. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1307). - - zos_copy - Improve zos_copy performance when copying multiple members from - one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183). - release_summary: 'Release Date: ''2024-05-08'' + - zos_apf - Enhanced error messages when an exception is caught. (https://github.com/ansible-collections/ibm_zos_core/pull/1204). + - zos_backup_restore - Add tmp_hlq option to the user interface to override + the default high level qualifier (HLQ) for temporary and backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1265). + - zos_copy - Documented `group` and `owner` options. (https://github.com/ansible-collections/ibm_zos_core/pull/1307). + - zos_copy - Improve zos_copy performance when copying multiple members from + one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1183). + release_summary: "Release Date: '2024-05-08' This changelog describes all changes made to the modules and plugins included @@ -186,82 +186,82 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1032-clean-job_submit-test.yml - - 1152-zos-lineinfile-remove-zos_copy-dependency.yml - - 1156-zos_archive-remove-zos_copy_dep.yml - - 1157-remove-zos-copy-from-zos-encode-tests.yml - - 1165-remove-zos-copy-dep-from-zos-fetch.yml - - 1167-remove-zos-copy-from-zos-blockinfile-tests.yml - - 1169-util-job-zoau-migration.yml - - 1179-remove-zos_encode-from_zos_lineinfile-tests.yml - - 1181-zoau-migration-zos_operator.yml - - 1182-migrate-module-utils-data-set.yml - - 1183-copy-members.yml - - 1184-remove-zos-fetch-dep-from-zos-copy.yml - - 1187-migrate-module-utils-copy.yml - - 1188-migrate-module_utils-backup.yml - - 1189-migrate-module_utils-encode.yml - - 1190-migrate-module_utils-dd_statement.yml - - 1196-zoau-migration-zos_gather_facts.yml - - 
1202-doc-gen-script-portability.yml - - 1204-migrate-zos_apf.yml - - 1209-zoau-migration-zos_job_submit.yml - - 1215-Migrate_zos_operator_action_query.yml - - 1216-Validate_module_zos_job_output_migration.yml - - 1217-validate-job-query.yml - - 1218-migrate-zos_encode.yml - - 1220-bugfix-zos_job_submit-default_value.yml - - 1222-zoau-migration-zos_copy.yml - - 1227-migrate-zos_archive.yml - - 1228-zos_find-remove-zos_lineinfile_dep.yml - - 1229-migrate-zos_fetch.yml - - 1237-migrate-zos_mount.yml - - 1238-migrate-zos_unarchive.yml - - 1242-zoau-migration-zos_data_set.yml - - 1256_Migrate_zos_blockinfile_and_lineinfile.yml - - 1257-zoau-import-zos_apf.yml - - 1261-job-submit-non-utf8-chars.yml - - 1265_Migrate_zos_backup_restore.yml - - 1270-quick-fix-len-of-volumes-work-around.yml - - 1286-update-zos_archive-zos_unarchive-docs.yml - - 1295-doc-zos_ping-scp.yml - - 1298-Remove_local_charset_from_zos_fetch.yml - - 1307-update-sanity-zos_copy.yml - - 1320-Zos_mvs_raw_ignores_tmp_hlq.yml - - 1322-update-docstring-encode.yml - - 1331-update-docstring-ickdsf.yml - - 1332-update-docstring-import_handler.yml - - 1333-update-docstring-job.yml - - 1336-update-docstring-validation.yml - - 1340-Work_around_fix_false_positive.yml - - 1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml - - 1347-update-docstring-zos_data_set.yml - - 1348-update-docstring-zos_encode.yml - - 1349-update-docstring-zos_fetch.yml - - 1350-update-docstring-zos_find.yml - - 1351-update-docstring-zos_gather_facts.yml - - 1352-update-docstring-zos_job_output.yml - - 1353-update-docstring-zos_job_query.yml - - 1354-update-docstring-zos_job_submit.yml - - 1355-update-docstring-zos_lineinfile.yml - - 1356-update-docstring-zos_mount.yml - - 1388-lowercase-choices.yml - - 1390-update-docstring-zos_script.yml - - 1391-update-docstring-zos_tso_command.yml - - 1392-update-docstring-zos_volume_init.yml - - 1393-update-docstring-zos_apf.yml - - 1394-Update_docstring-zos_operator_action_query.yml - - 
1443-zos_find-filter-size.yml - - 692-changelog-lint-ac-tool.yml - - 971-bug-job_submit-can-stacktrace.yml - - 992-fix-sanity4to6.yml - - v1.10.0-beta.1_summary.yml - release_date: '2024-05-08' + - 1032-clean-job_submit-test.yml + - 1152-zos-lineinfile-remove-zos_copy-dependency.yml + - 1156-zos_archive-remove-zos_copy_dep.yml + - 1157-remove-zos-copy-from-zos-encode-tests.yml + - 1165-remove-zos-copy-dep-from-zos-fetch.yml + - 1167-remove-zos-copy-from-zos-blockinfile-tests.yml + - 1169-util-job-zoau-migration.yml + - 1179-remove-zos_encode-from_zos_lineinfile-tests.yml + - 1181-zoau-migration-zos_operator.yml + - 1182-migrate-module-utils-data-set.yml + - 1183-copy-members.yml + - 1184-remove-zos-fetch-dep-from-zos-copy.yml + - 1187-migrate-module-utils-copy.yml + - 1188-migrate-module_utils-backup.yml + - 1189-migrate-module_utils-encode.yml + - 1190-migrate-module_utils-dd_statement.yml + - 1196-zoau-migration-zos_gather_facts.yml + - 1202-doc-gen-script-portability.yml + - 1204-migrate-zos_apf.yml + - 1209-zoau-migration-zos_job_submit.yml + - 1215-Migrate_zos_operator_action_query.yml + - 1216-Validate_module_zos_job_output_migration.yml + - 1217-validate-job-query.yml + - 1218-migrate-zos_encode.yml + - 1220-bugfix-zos_job_submit-default_value.yml + - 1222-zoau-migration-zos_copy.yml + - 1227-migrate-zos_archive.yml + - 1228-zos_find-remove-zos_lineinfile_dep.yml + - 1229-migrate-zos_fetch.yml + - 1237-migrate-zos_mount.yml + - 1238-migrate-zos_unarchive.yml + - 1242-zoau-migration-zos_data_set.yml + - 1256_Migrate_zos_blockinfile_and_lineinfile.yml + - 1257-zoau-import-zos_apf.yml + - 1261-job-submit-non-utf8-chars.yml + - 1265_Migrate_zos_backup_restore.yml + - 1270-quick-fix-len-of-volumes-work-around.yml + - 1286-update-zos_archive-zos_unarchive-docs.yml + - 1295-doc-zos_ping-scp.yml + - 1298-Remove_local_charset_from_zos_fetch.yml + - 1307-update-sanity-zos_copy.yml + - 1320-Zos_mvs_raw_ignores_tmp_hlq.yml + - 1322-update-docstring-encode.yml + - 
1331-update-docstring-ickdsf.yml + - 1332-update-docstring-import_handler.yml + - 1333-update-docstring-job.yml + - 1336-update-docstring-validation.yml + - 1340-Work_around_fix_false_positive.yml + - 1345-max_rc_more_than_0_doesn_not_put_change_as_true.yml + - 1347-update-docstring-zos_data_set.yml + - 1348-update-docstring-zos_encode.yml + - 1349-update-docstring-zos_fetch.yml + - 1350-update-docstring-zos_find.yml + - 1351-update-docstring-zos_gather_facts.yml + - 1352-update-docstring-zos_job_output.yml + - 1353-update-docstring-zos_job_query.yml + - 1354-update-docstring-zos_job_submit.yml + - 1355-update-docstring-zos_lineinfile.yml + - 1356-update-docstring-zos_mount.yml + - 1388-lowercase-choices.yml + - 1390-update-docstring-zos_script.yml + - 1391-update-docstring-zos_tso_command.yml + - 1392-update-docstring-zos_volume_init.yml + - 1393-update-docstring-zos_apf.yml + - 1394-Update_docstring-zos_operator_action_query.yml + - 1443-zos_find-filter-size.yml + - 692-changelog-lint-ac-tool.yml + - 971-bug-job_submit-can-stacktrace.yml + - 992-fix-sanity4to6.yml + - v1.10.0-beta.1_summary.yml + release_date: "2024-05-08" 1.11.0: changes: - release_summary: 'Release Date: ''2024-10-01'' + release_summary: "Release Date: '2024-10-01' This changelog describes all changes made to the modules and plugins included @@ -269,76 +269,76 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - v1.11.0_summary.yml - release_date: '2024-09-25' + - v1.11.0_summary.yml + release_date: "2024-09-25" 1.11.0-beta.1: changes: bugfixes: - - module_util/data_set.py - DataSet.data_set_cataloged function previously only - returned True or False, but failed to account for exceptions which occurred - during the LISTCAT. The fix now raises an MVSCmdExecError if the return code - from LISTCAT is too high. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1535). - - zos_copy - a regression in version 1.4.0 made the module stop automatically - computing member names when copying a single file into a PDS/E. Fix now lets - a user copy a single file into a PDS/E without adding a member in the dest - option. (https://github.com/ansible-collections/ibm_zos_core/pull/1570). - - zos_copy - module would use opercmd to check if a non existent destination - data set is locked. Fix now only checks if the destination is already present. - (https://github.com/ansible-collections/ibm_zos_core/pull/1623). - - zos_job_submit - Was not propagating any error types UnicodeDecodeError, JSONDecodeError, - TypeError, KeyError when encountered, now the error message shares the type - error. (https://github.com/ansible-collections/ibm_zos_core/pull/1560). - - zos_mvs_raw - DD_output first character from each line was missing. Change - now includes the first character of each line. (https://github.com/ansible-collections/ibm_zos_core/pull/1543). + - module_util/data_set.py - DataSet.data_set_cataloged function previously only + returned True or False, but failed to account for exceptions which occurred + during the LISTCAT. The fix now raises an MVSCmdExecError if the return code + from LISTCAT is too high. (https://github.com/ansible-collections/ibm_zos_core/pull/1535). + - zos_copy - a regression in version 1.4.0 made the module stop automatically + computing member names when copying a single file into a PDS/E. Fix now lets + a user copy a single file into a PDS/E without adding a member in the dest + option. (https://github.com/ansible-collections/ibm_zos_core/pull/1570). + - zos_copy - module would use opercmd to check if a non existent destination + data set is locked. Fix now only checks if the destination is already present. + (https://github.com/ansible-collections/ibm_zos_core/pull/1623). 
+ - zos_job_submit - Was not propagating any error types UnicodeDecodeError, JSONDecodeError, + TypeError, KeyError when encountered, now the error message shares the type + error. (https://github.com/ansible-collections/ibm_zos_core/pull/1560). + - zos_mvs_raw - DD_output first character from each line was missing. Change + now includes the first character of each line. (https://github.com/ansible-collections/ibm_zos_core/pull/1543). minor_changes: - - zos_apf - Change input to auto-escape 'library' names containing symbols (https://github.com/ansible-collections/ibm_zos_core/pull/1493). - - zos_archive - Added support for GDG and GDS relative name notation to archive - data sets. Added support for data set names with special characters like $, - /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). - - zos_backup_restore - Added support for GDS relative name notation to include - or exclude data sets when operation is backup. Added support for data set - names with special characters like $, /#, and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1527). - - zos_blockinfile - Added support for GDG and GDS relative name notation to - use a data set. And backup in new generations. Added support for data set - names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). - - zos_copy - add support for copying generation data sets (GDS) and generation - data groups (GDG), as well as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1564). - - zos_data_set - Added support for GDG and GDS relative name notation to create, - delete, catalog and uncatalog a data set. Added support for data set names - with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1504). - - zos_encode - add support for encoding generation data sets (GDS), as well - as using a GDS for backup. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1531). - - zos_fetch - add support for fetching generation data groups and generation - data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1519) - - zos_find - added support for GDG/GDS and special characters (https://github.com/ansible-collections/ibm_zos_core/pull/1518). - - zos_job_submit - Improved the copy to remote mechanic to avoid using deepcopy - that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). - - zos_job_submit - add support for generation data groups and generation data - sets as sources for jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/1497) - - zos_lineinfile - Added support for GDG and GDS relative name notation to use - a data set. And backup in new generations. Added support for data set names - with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). - - zos_mount - Added support for data set names with special characters ($, /#, - /- and @). This is for both src and backup data set names. (https://github.com/ansible-collections/ibm_zos_core/pull/1631). - - zos_mvs_raw - Added support for GDG and GDS relative name notation to use - a data set. Added support for data set names with special characters like - $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1525). - - zos_mvs_raw - Added support for GDG and GDS relative positive name notation - to use a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1541). - - zos_mvs_raw - Redesign the wrappers of dd clases to use properly the arguments. - (https://github.com/ansible-collections/ibm_zos_core/pull/1470). - - zos_script - Improved the copy to remote mechanic to avoid using deepcopy - that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). 
- - zos_tso_command - Added support for GDG and GDS relative name notation to - use a data set name. Added support for data set names with special characters - like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). - - zos_unarchive - Added support for data set names with special characters like - $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). - - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy - that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). - release_summary: 'Release Date: ''2024-08-05'' + - zos_apf - Change input to auto-escape 'library' names containing symbols (https://github.com/ansible-collections/ibm_zos_core/pull/1493). + - zos_archive - Added support for GDG and GDS relative name notation to archive + data sets. Added support for data set names with special characters like $, + /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). + - zos_backup_restore - Added support for GDS relative name notation to include + or exclude data sets when operation is backup. Added support for data set + names with special characters like $, /#, and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1527). + - zos_blockinfile - Added support for GDG and GDS relative name notation to + use a data set. And backup in new generations. Added support for data set + names with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). + - zos_copy - add support for copying generation data sets (GDS) and generation + data groups (GDG), as well as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1564). + - zos_data_set - Added support for GDG and GDS relative name notation to create, + delete, catalog and uncatalog a data set. Added support for data set names + with special characters like $, /#, /- and @. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1504). + - zos_encode - add support for encoding generation data sets (GDS), as well + as using a GDS for backup. (https://github.com/ansible-collections/ibm_zos_core/pull/1531). + - zos_fetch - add support for fetching generation data groups and generation + data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1519) + - zos_find - added support for GDG/GDS and special characters (https://github.com/ansible-collections/ibm_zos_core/pull/1518). + - zos_job_submit - Improved the copy to remote mechanic to avoid using deepcopy + that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_job_submit - add support for generation data groups and generation data + sets as sources for jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/1497) + - zos_lineinfile - Added support for GDG and GDS relative name notation to use + a data set. And backup in new generations. Added support for data set names + with special characters like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1516). + - zos_mount - Added support for data set names with special characters ($, /#, + /- and @). This is for both src and backup data set names. (https://github.com/ansible-collections/ibm_zos_core/pull/1631). + - zos_mvs_raw - Added support for GDG and GDS relative name notation to use + a data set. Added support for data set names with special characters like + $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1525). + - zos_mvs_raw - Added support for GDG and GDS relative positive name notation + to use a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1541). + - zos_mvs_raw - Redesign the wrappers of dd classes to use properly the arguments. + (https://github.com/ansible-collections/ibm_zos_core/pull/1470). 
+ - zos_script - Improved the copy to remote mechanic to avoid using deepcopy + that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + - zos_tso_command - Added support for GDG and GDS relative name notation to + use a data set name. Added support for data set names with special characters + like $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1563). + - zos_unarchive - Added support for data set names with special characters like + $, /#, /- and @. (https://github.com/ansible-collections/ibm_zos_core/pull/1511). + - zos_unarchive - Improved the copy to remote mechanic to avoid using deepcopy + that could result in failure for some systems. (https://github.com/ansible-collections/ibm_zos_core/pull/1561). + release_summary: "Release Date: '2024-08-05' This changelog describes all changes made to the modules and plugins included @@ -346,75 +346,75 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1170-enhancememt-make-pipeline-217-compatible.yml - - 1323-Update_docstring-dd_statement.yml - - 1334-update-docstring-mcs_cmd.yml - - 1335-update-docstring-template.yml - - 1337-update-docstring-vtoc.yml - - 1338-update-docstring-zoau_version_checker.yml - - 1342-update-docstring-zos_backup_restore.yml - - 1343-update-docstring-zos_blockinline.yml - - 1344-update-docstring-zos_copy.yml - - 1361-update-docstring-zos_operator.yml - - 1362-update-docstring-file.yml - - 1363-update-docstring-system.yml - - 1374-enhancement-zos-find-gdg-gds-special-chars.yml - - 1380-enhancement-add-sybols-zos_apf.yml - - 1384-update-docstring-backup.yml - - 1385-update-docstring-better_arg_parser.yml - - 1386-gdg-symbols-support.yml - - 1387-update-docstring-copy.yml - - 1415-Update_docstring-zos_archive.yml - - 1470-redesign_mvs_raw.yml - - 1484-update-ac-tool-ansible-lint.yml - - 
1488-zos_copy-refactor-force.yml - - 1495-default-values-data-set-class.yml - - 1496-fix-gds-resolve.yml - - 1497-gdg-support-zos-job-submit.yml - - 1504-zos_data_set-gdg-support.yml - - 1507-zos_operator-docs.yml - - 1511-zos_archive_unarchive-gdg-support.yml - - 1512-bugfix-zos_job_submit-error-type.yml - - 1515-gdg_batch_creation.yml - - 1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml - - 1519-zos_fetch-gdg-support.yml - - 1525-mvs_raw_support_gdg_gds_special_character.yml - - 1527-zos_backup-gdg.yml - - 1531-zos_encode_gdg_support.yml - - 1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml - - 1541-output_mvs_raw_gds_positive_was_false_positive.yml - - 1543-mvs_raw_fix_verbose_and_first_character.yml - - 1550-lower_case_idcams_utility.yml - - 1552-readme-support-updates.yml - - 1553-Console_parallel.yml - - 1561-remove_deep_copy.yml - - 1563-zos_tso_command-gdg-support.yml - - 1564-zos_copy_gdg_support.yml - - 1565-remove-deprecated-pipes-library.yml - - 1570-compute-member-name-zos_copy.yml - - 1623-zos_copy-avoid-opercmd.yml - - 1631-enabler-zos_mount-special-character-support.yml - - v1.11.0-beta.1_summary.yml - release_date: '2024-08-05' + - 1170-enhancememt-make-pipeline-217-compatible.yml + - 1323-Update_docstring-dd_statement.yml + - 1334-update-docstring-mcs_cmd.yml + - 1335-update-docstring-template.yml + - 1337-update-docstring-vtoc.yml + - 1338-update-docstring-zoau_version_checker.yml + - 1342-update-docstring-zos_backup_restore.yml + - 1343-update-docstring-zos_blockinline.yml + - 1344-update-docstring-zos_copy.yml + - 1361-update-docstring-zos_operator.yml + - 1362-update-docstring-file.yml + - 1363-update-docstring-system.yml + - 1374-enhancement-zos-find-gdg-gds-special-chars.yml + - 1380-enhancement-add-sybols-zos_apf.yml + - 1384-update-docstring-backup.yml + - 1385-update-docstring-better_arg_parser.yml + - 1386-gdg-symbols-support.yml + - 1387-update-docstring-copy.yml + - 
1415-Update_docstring-zos_archive.yml + - 1470-redesign_mvs_raw.yml + - 1484-update-ac-tool-ansible-lint.yml + - 1488-zos_copy-refactor-force.yml + - 1495-default-values-data-set-class.yml + - 1496-fix-gds-resolve.yml + - 1497-gdg-support-zos-job-submit.yml + - 1504-zos_data_set-gdg-support.yml + - 1507-zos_operator-docs.yml + - 1511-zos_archive_unarchive-gdg-support.yml + - 1512-bugfix-zos_job_submit-error-type.yml + - 1515-gdg_batch_creation.yml + - 1516-lineinfile_blockinfile_gdgsgds_and_special_character_support.yml + - 1519-zos_fetch-gdg-support.yml + - 1525-mvs_raw_support_gdg_gds_special_character.yml + - 1527-zos_backup-gdg.yml + - 1531-zos_encode_gdg_support.yml + - 1535-raise-error-in-module-util-data_set-function-data_set_cataloged.yml + - 1541-output_mvs_raw_gds_positive_was_false_positive.yml + - 1543-mvs_raw_fix_verbose_and_first_character.yml + - 1550-lower_case_idcams_utility.yml + - 1552-readme-support-updates.yml + - 1553-Console_parallel.yml + - 1561-remove_deep_copy.yml + - 1563-zos_tso_command-gdg-support.yml + - 1564-zos_copy_gdg_support.yml + - 1565-remove-deprecated-pipes-library.yml + - 1570-compute-member-name-zos_copy.yml + - 1623-zos_copy-avoid-opercmd.yml + - 1631-enabler-zos_mount-special-character-support.yml + - v1.11.0-beta.1_summary.yml + release_date: "2024-08-05" 1.12.0: changes: bugfixes: - - zos_find - Module would not find VSAM data and index resource types. Fix now - finds the data and index resource types. (https://github.com/ansible-collections/ibm_zos_core/pull/1818). - - zos_find - Module would not find a VSAM cluster resource type if it was in - use with DISP=OLD. Fix now finds the VSAM cluster. (https://github.com/ansible-collections/ibm_zos_core/pull/1818). - - zos_mvs_raw - If a program failed with a non-zero return code and verbose - was false, the module would succeed (false positive). Fix now fails the module - for all instances where a program has a non-zero return code. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1780). - - zos_mvs_raw - Module would obfuscate the return code from the program when - failing returning 8 instead. Fix now returns the proper return code from the - program. (https://github.com/ansible-collections/ibm_zos_core/pull/1780). - - zos_mvs_raw - Module would return the stderr content in stdout when verbose - was true and return code was 0. Fix now does not replace stdout content with - stderr. (https://github.com/ansible-collections/ibm_zos_core/pull/1800). - release_summary: 'Release Date: ''2024-12-06'' + - zos_find - Module would not find VSAM data and index resource types. Fix now + finds the data and index resource types. (https://github.com/ansible-collections/ibm_zos_core/pull/1818). + - zos_find - Module would not find a VSAM cluster resource type if it was in + use with DISP=OLD. Fix now finds the VSAM cluster. (https://github.com/ansible-collections/ibm_zos_core/pull/1818). + - zos_mvs_raw - If a program failed with a non-zero return code and verbose + was false, the module would succeed (false positive). Fix now fails the module + for all instances where a program has a non-zero return code. (https://github.com/ansible-collections/ibm_zos_core/pull/1780). + - zos_mvs_raw - Module would obfuscate the return code from the program when + failing returning 8 instead. Fix now returns the proper return code from the + program. (https://github.com/ansible-collections/ibm_zos_core/pull/1780). + - zos_mvs_raw - Module would return the stderr content in stdout when verbose + was true and return code was 0. Fix now does not replace stdout content with + stderr. (https://github.com/ansible-collections/ibm_zos_core/pull/1800). 
+ release_summary: "Release Date: '2024-12-06' This changelog describes all changes made to the modules and plugins included @@ -424,87 +424,87 @@ releases: the collections `release notes `__ - ' + " fragments: - - 1780-zos_mvs_raw-verbose-fail.yml - - 1800-zos_mvs_raw-stdout-replaced-by-stderr.yml - - 1818-zos_find-vsam-disp-old.yml - - v1.12.0_summary.yml - release_date: '2024-12-04' + - 1780-zos_mvs_raw-verbose-fail.yml + - 1800-zos_mvs_raw-stdout-replaced-by-stderr.yml + - 1818-zos_find-vsam-disp-old.yml + - v1.12.0_summary.yml + release_date: "2024-12-04" 1.12.0-beta.1: changes: bugfixes: - - zos_apf - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_archive - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_backup_restore - When a recoverable error was encountered and ``recover=True``, - the module would ignore the option and fail. Fix now does not fail when a - recoverable error is raised when ``recover=True``. (https://github.com/ansible-collections/ibm_zos_core/pull/1643). - - zos_blockinfile - Previously module was not able to delete a block when 'marker_begin' - and 'marker_end' were set to the same value. Fix introduces a requirement - for 'marker_begin' and 'marker_end' to have different values. (https://github.com/ansible-collections/ibm_zos_core/pull/1684). - - zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). 
- - zos_copy - Improve module zos_copy error handling when the user does not have - universal access authority set to UACC(READ) for SAF Profile 'MVS.MCSOPER.ZOAU' - and SAF Class OPERCMDS. The module now handles the exception and returns an - informative message. (https://github.com/ansible-collections/ibm_zos_core/pull/1766). - - zos_copy - Previously, the module ignored the value of ``remote_tmp`` set - in Ansible configuration file and used the ``/tmp/`` directory. Fix now uses - the value of ``remote_tmp`` or the default value ``~/.ansible/tmp`` if none - is given. (https://github.com/ansible-collections/ibm_zos_core/pull/1739). - - zos_copy - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_data_set - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_encode - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_fetch - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_job_output - RACF user names containing a ``@``, ``$``, or ``#`` raised - an invalid argument error. Fix now allows the use of all valid characters - for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). - - zos_job_query - Module was not returning values for system and subsystem. - Fix now returns these values. (https://github.com/ansible-collections/ibm_zos_core/pull/1761). 
- - zos_job_query - RACF user names containing a ``@``, ``$``, or ``#`` raised - an invalid argument error. Fix now allows the use of all valid characters - for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). - - zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_mount - The ``tmp_hlq`` option was previously ignored and default values - were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_mvs_raw - base64 sub-option for return_content under option for retrieving - DD output did not return base64. Fix now returns the base64 encoded contents - of the DD. (https://github.com/ansible-collections/ibm_zos_core/pull/1691). - - zos_script - The module would discard command line arguments in a command, - except for the first one. Fix now makes sure that all arguments are passed - to the remote command that gets executed. (https://github.com/ansible-collections/ibm_zos_core/pull/1698). - - zos_unarchive - The ``tmp_hlq`` option was previously ignored and default - values were used instead. Fix now honors the value set in the module option. - (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_apf - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_archive - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_backup_restore - When a recoverable error was encountered and ``recover=True``, + the module would ignore the option and fail. 
Fix now does not fail when a + recoverable error is raised when ``recover=True``. (https://github.com/ansible-collections/ibm_zos_core/pull/1643). + - zos_blockinfile - Previously module was not able to delete a block when 'marker_begin' + and 'marker_end' were set to the same value. Fix introduces a requirement + for 'marker_begin' and 'marker_end' to have different values. (https://github.com/ansible-collections/ibm_zos_core/pull/1684). + - zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_copy - Improve module zos_copy error handling when the user does not have + universal access authority set to UACC(READ) for SAF Profile 'MVS.MCSOPER.ZOAU' + and SAF Class OPERCMDS. The module now handles the exception and returns an + informative message. (https://github.com/ansible-collections/ibm_zos_core/pull/1766). + - zos_copy - Previously, the module ignored the value of ``remote_tmp`` set + in Ansible configuration file and used the ``/tmp/`` directory. Fix now uses + the value of ``remote_tmp`` or the default value ``~/.ansible/tmp`` if none + is given. (https://github.com/ansible-collections/ibm_zos_core/pull/1739). + - zos_copy - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_data_set - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_encode - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). 
+ - zos_fetch - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_job_output - RACF user names containing a ``@``, ``$``, or ``#`` raised + an invalid argument error. Fix now allows the use of all valid characters + for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). + - zos_job_query - Module was not returning values for system and subsystem. + Fix now returns these values. (https://github.com/ansible-collections/ibm_zos_core/pull/1761). + - zos_job_query - RACF user names containing a ``@``, ``$``, or ``#`` raised + an invalid argument error. Fix now allows the use of all valid characters + for a RACF user. (https://github.com/ansible-collections/ibm_zos_core/pull/1661). + - zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_mount - The ``tmp_hlq`` option was previously ignored and default values + were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_mvs_raw - base64 sub-option for return_content under option for retrieving + DD output did not return base64. Fix now returns the base64 encoded contents + of the DD. (https://github.com/ansible-collections/ibm_zos_core/pull/1691). + - zos_script - The module would discard command line arguments in a command, + except for the first one. Fix now makes sure that all arguments are passed + to the remote command that gets executed. (https://github.com/ansible-collections/ibm_zos_core/pull/1698). + - zos_unarchive - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1695). minor_changes: - - zos_backup_restore - Redefines the default behavior of module option `hlq`. - When option `operation` is set to `restore` and the `hlq` is not provided, - the original high level qualifiers in a backup will be used for a restore. - (https://github.com/ansible-collections/ibm_zos_core/pull/1632). - - zos_job_output - Added address space type used by jobs in return JSON as `content_type`. - (https://github.com/ansible-collections/ibm_zos_core/pull/1673). - - zos_job_query - Added address space type used by jobs in return JSON as `content_type`. - (https://github.com/ansible-collections/ibm_zos_core/pull/1673). - - zos_job_submit - Added address space type used by jobs in return JSON as `content_type`. - (https://github.com/ansible-collections/ibm_zos_core/pull/1673). - - zos_mvs_raw - Un-mappable chars in stdout/stderr streams are now replaced - with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_operator - Added new option ``case_sensitive`` to module, allowing users - to control how case in a command is handled by it. (https://github.com/ansible-collections/ibm_zos_core/pull/1641) - - zos_script - Un-mappable chars in stdout/stderr streams are now replaced with - the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - - zos_tso_command - Un-mappable chars in stdout/stderr streams are now replaced - with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). - release_summary: 'Release Date: ''2024-10-31'' + - zos_backup_restore - Redefines the default behavior of module option `hlq`. + When option `operation` is set to `restore` and the `hlq` is not provided, + the original high level qualifiers in a backup will be used for a restore. + (https://github.com/ansible-collections/ibm_zos_core/pull/1632). 
+ - zos_job_output - Added address space type used by jobs in return JSON as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_job_query - Added address space type used by jobs in return JSON as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_job_submit - Added address space type used by jobs in return JSON as `content_type`. + (https://github.com/ansible-collections/ibm_zos_core/pull/1673). + - zos_mvs_raw - Un-mappable chars in stdout/stderr streams are now replaced + with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_operator - Added new option ``case_sensitive`` to module, allowing users + to control how case in a command is handled by it. (https://github.com/ansible-collections/ibm_zos_core/pull/1641) + - zos_script - Un-mappable chars in stdout/stderr streams are now replaced with + the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). + - zos_tso_command - Un-mappable chars in stdout/stderr streams are now replaced + with the replacement character. (https://github.com/ansible-collections/ibm_zos_core/pull/1634). 
+ release_summary: "Release Date: '2024-10-31' This changelog describes all changes made to the modules and plugins included @@ -512,53 +512,53 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1632-Validate_to_restore_keep_orginial_hlq.yml - - 1633-zos_mvs_raw_tests_portability.yml - - 1634-updates-for-non-utf8-depr-warning.yml - - 1635-backup_restore_portability.yml - - 1639-zos_tso_command_portability.yml - - 1641-case-sensitivity-zos_operator.yml - - 1642-Ensure_portability_zos_encode.yml - - 1643-Validate_parameter_recover_to_tolerate_enqueue.yml - - 1647-doc-backup-restore-racf-class.yml - - 1654-zos_apf_tests_change_temphlq.yml - - 1656-zos_find_portability.yml - - 1657-test_fetch_portability.yml - - 1658-job_submit_portability.yml - - 1661-job-owner-valid-characters.yml - - 1664-portability-zos_copy.yml - - 1673-return-job-type.yml - - 1676-portability_zos_blockinfile.yml - - 1677-zos_job_query_portability.yaml - - 1684-Add_validation_for_marker_begin_end.yml - - 1687-lineinfile_portability.yml - - 1689-add-non-utf8-testcase.yml - - 1691-zos-mvs-raw-base64-mode.yml - - 1695-tmp_hlq_when_calling_mvscmd.yml - - 1698-multiple-args-zos_script.yml - - 1739-tmp_files_not_use_tmp_folder.yml - - 1761-system-subsystem-job_query.yml - - 1766-zos_copy-racf-uacc-updates.yml - - 828-adds-concurrent-executor.yml - - v1.12.0-beta.1_summary.yml - release_date: '2024-10-24' + - 1632-Validate_to_restore_keep_orginial_hlq.yml + - 1633-zos_mvs_raw_tests_portability.yml + - 1634-updates-for-non-utf8-depr-warning.yml + - 1635-backup_restore_portability.yml + - 1639-zos_tso_command_portability.yml + - 1641-case-sensitivity-zos_operator.yml + - 1642-Ensure_portability_zos_encode.yml + - 1643-Validate_parameter_recover_to_tolerate_enqueue.yml + - 1647-doc-backup-restore-racf-class.yml + - 1654-zos_apf_tests_change_temphlq.yml + - 
1656-zos_find_portability.yml + - 1657-test_fetch_portability.yml + - 1658-job_submit_portability.yml + - 1661-job-owner-valid-characters.yml + - 1664-portability-zos_copy.yml + - 1673-return-job-type.yml + - 1676-portability_zos_blockinfile.yml + - 1677-zos_job_query_portability.yaml + - 1684-Add_validation_for_marker_begin_end.yml + - 1687-lineinfile_portability.yml + - 1689-add-non-utf8-testcase.yml + - 1691-zos-mvs-raw-base64-mode.yml + - 1695-tmp_hlq_when_calling_mvscmd.yml + - 1698-multiple-args-zos_script.yml + - 1739-tmp_files_not_use_tmp_folder.yml + - 1761-system-subsystem-job_query.yml + - 1766-zos_copy-racf-uacc-updates.yml + - 828-adds-concurrent-executor.yml + - v1.12.0-beta.1_summary.yml + release_date: "2024-10-24" 1.13.0: changes: bugfixes: - - zos_copy - Previously, if the dataset name included special characters such - as $, validation would fail when force_lock was false. This has been changed - to allow the use of special characters when force_lock option is false. (https://github.com/ansible-collections/ibm_zos_core/pull/1936) - - zos_copy - Previously, if the dataset name included special characters such - as ``$`` and ``asa_text`` option is true, the module would fail. Fix now allows - the use of special characters in the data set name when ``asa_text`` option - is true. (https://github.com/ansible-collections/ibm_zos_core/pull/1924). - - zos_copy - When ``asa_text`` was set to true at the same time as ``force_lock``, - a copy would fail saying the destination was already in use. Fix now opens - destination data sets up with disposition SHR when ``force_lock`` and ``asa_text`` - are set to true. (https://github.com/ansible-collections/ibm_zos_core/pull/1939). - release_summary: 'Release Date: ''2025-03-31'' + - zos_copy - Previously, if the dataset name included special characters such + as $, validation would fail when force_lock was false. This has been changed + to allow the use of special characters when force_lock option is false. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1936) + - zos_copy - Previously, if the dataset name included special characters such + as ``$`` and ``asa_text`` option is true, the module would fail. Fix now allows + the use of special characters in the data set name when ``asa_text`` option + is true. (https://github.com/ansible-collections/ibm_zos_core/pull/1924). + - zos_copy - When ``asa_text`` was set to true at the same time as ``force_lock``, + a copy would fail saying the destination was already in use. Fix now opens + destination data sets up with disposition SHR when ``force_lock`` and ``asa_text`` + are set to true. (https://github.com/ansible-collections/ibm_zos_core/pull/1939). + release_summary: "Release Date: '2025-03-31' This changelog describes all changes made to the modules and plugins included @@ -566,74 +566,74 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1924-zos_copy-asa-special-char.yml - - 1926-fix-beta-branch.yml - - 1936-wrong_behaviour_force_lock_parameter.yml - - 1939-zos_copy_asa-dest-lock-support.yml - - v1.13.0_summary.yml - release_date: '2025-03-25' + - 1924-zos_copy-asa-special-char.yml + - 1926-fix-beta-branch.yml + - 1936-wrong_behaviour_force_lock_parameter.yml + - 1939-zos_copy_asa-dest-lock-support.yml + - v1.13.0_summary.yml + release_date: "2025-03-25" 1.13.0-beta.1: changes: bugfixes: - - zos_copy - Improve module zos_copy error handling when the user does not have - universal access authority set to UACC(READ) for SAF Profile 'MVS.MCSOPER.ZOAU' - and SAF Class OPERCMDS. The module now handles the exception and returns an - informative message. (https://github.com/ansible-collections/ibm_zos_core/pull/1744). - - zos_fetch - Some relative paths were not accepted as a parameter e.g. C(files/fetched_file). 
- Change now allows the user to use different types of relative paths as a parameter. - (https://github.com/ansible-collections/ibm_zos_core/pull/1769). - - zos_find - Module would not find VSAM data and index resource types. Fix now - finds the data and index resource types. (https://github.com/ansible-collections/ibm_zos_core/pull/1822). - - zos_find - Module would not find a VSAM cluster resource type if it was in - use with DISP=OLD. Fix now finds the VSAM cluster. (https://github.com/ansible-collections/ibm_zos_core/pull/1822). - - zos_job_query - Module was not returning values for system and subsystem. - Fix now returns these values. (https://github.com/ansible-collections/ibm_zos_core/pull/1759). - - zos_mvs_raw - If a program failed with a non-zero return code and verbose - was false, the module would succeed. Whereas, if the program failed and verbose - was true the module would fail. Fix now has a consistent behavior and fails - in both cases. (https://github.com/ansible-collections/ibm_zos_core/pull/1774). - - zos_mvs_raw - Module would not populate stderr return value. Fix now populates - stderr in return values. (https://github.com/ansible-collections/ibm_zos_core/pull/1808). - - zos_mvs_raw - Module would obfuscate the return code from the program when - failing returning 8 instead. Fix now returns the proper return code from the - program. (https://github.com/ansible-collections/ibm_zos_core/pull/1774). - - zos_mvs_raw - Module would return the stderr content in stdout when verbose - was true and return code was 0. Fix now does not replace stdout content with - stderr. (https://github.com/ansible-collections/ibm_zos_core/pull/1794). - - zos_mvs_raw - Option ``tmp_hlq`` was not being used as HLQ when creating backup - data sets. Fix now uses ``tmp_hlq`` as HLQ for backup data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1847). 
- - zos_script - When the user trying to run a remote script had execute permissions - but wasn't owner of the file, the module would fail while trying to change - permissions on it. Fix now ensures the module first checks if the user can - execute the script and only try to change permissions when necessary. (https://github.com/ansible-collections/ibm_zos_core/pull/1852). + - zos_copy - Improve module zos_copy error handling when the user does not have + universal access authority set to UACC(READ) for SAF Profile 'MVS.MCSOPER.ZOAU' + and SAF Class OPERCMDS. The module now handles the exception and returns an + informative message. (https://github.com/ansible-collections/ibm_zos_core/pull/1744). + - zos_fetch - Some relative paths were not accepted as a parameter e.g. C(files/fetched_file). + Change now allows the user to use different types of relative paths as a parameter. + (https://github.com/ansible-collections/ibm_zos_core/pull/1769). + - zos_find - Module would not find VSAM data and index resource types. Fix now + finds the data and index resource types. (https://github.com/ansible-collections/ibm_zos_core/pull/1822). + - zos_find - Module would not find a VSAM cluster resource type if it was in + use with DISP=OLD. Fix now finds the VSAM cluster. (https://github.com/ansible-collections/ibm_zos_core/pull/1822). + - zos_job_query - Module was not returning values for system and subsystem. + Fix now returns these values. (https://github.com/ansible-collections/ibm_zos_core/pull/1759). + - zos_mvs_raw - If a program failed with a non-zero return code and verbose + was false, the module would succeed. Whereas, if the program failed and verbose + was true the module would fail. Fix now has a consistent behavior and fails + in both cases. (https://github.com/ansible-collections/ibm_zos_core/pull/1774). + - zos_mvs_raw - Module would not populate stderr return value. Fix now populates + stderr in return values. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1808). + - zos_mvs_raw - Module would obfuscate the return code from the program when + failing returning 8 instead. Fix now returns the proper return code from the + program. (https://github.com/ansible-collections/ibm_zos_core/pull/1774). + - zos_mvs_raw - Module would return the stderr content in stdout when verbose + was true and return code was 0. Fix now does not replace stdout content with + stderr. (https://github.com/ansible-collections/ibm_zos_core/pull/1794). + - zos_mvs_raw - Option ``tmp_hlq`` was not being used as HLQ when creating backup + data sets. Fix now uses ``tmp_hlq`` as HLQ for backup data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1847). + - zos_script - When the user trying to run a remote script had execute permissions + but wasn't owner of the file, the module would fail while trying to change + permissions on it. Fix now ensures the module first checks if the user can + execute the script and only try to change permissions when necessary. (https://github.com/ansible-collections/ibm_zos_core/pull/1852). minor_changes: - - module_utils/import_handler - When importing a non supported ZOAU version - like 1.2.x the module would throw a non user friendly error message. Error - message is now explicit about ZOAU not being properly configured for Ansible. - (https://github.com/ansible-collections/ibm_zos_core/pull/1804). - - zos_copy - Added new option ``autoescape`` to ``template_parameters``, allowing - users to disable autoescaping of common XML/HTML characters when working with - Jinja templates. (https://github.com/ansible-collections/ibm_zos_core/pull/1810). - - zos_copy - Adds error message when a PDS/E source member does not exist or - is not cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/1821). - - zos_job_submit - Add deploy and forget capability. 
Now when wait_time_s is - 0, the module will submit the job and will not wait to get the job details - or content, returning only the job id. (https://github.com/ansible-collections/ibm_zos_core/pull/1746). - - zos_job_submit - Added new option ``autoescape`` to ``template_parameters``, - allowing users to disable autoescaping of common XML/HTML characters when - working with Jinja templates. (https://github.com/ansible-collections/ibm_zos_core/pull/1810). - - zos_job_submit - Added support to run zos_job_submit tasks in async mode inside - playbooks. (https://github.com/ansible-collections/ibm_zos_core/pull/1786). - - zos_mvs_raw - Added ``max_rc`` option. Now when the user sets ``max_rc``, - the module tolerates the failure if the return code is smaller than the ``max_rc`` - specified, however, return value ``changed`` will be False if the program - return code is not 0. (https://github.com/ansible-collections/ibm_zos_core/pull/1813). - - zos_script - Added new option ``autoescape`` to ``template_parameters``, allowing - users to disable autoescaping of common XML/HTML characters when working with - Jinja templates. (https://github.com/ansible-collections/ibm_zos_core/pull/1810). - release_summary: 'Release Date: ''2025-01-30'' + - module_utils/import_handler - When importing a non supported ZOAU version + like 1.2.x the module would throw a non user friendly error message. Error + message is now explicit about ZOAU not being properly configured for Ansible. + (https://github.com/ansible-collections/ibm_zos_core/pull/1804). + - zos_copy - Added new option ``autoescape`` to ``template_parameters``, allowing + users to disable autoescaping of common XML/HTML characters when working with + Jinja templates. (https://github.com/ansible-collections/ibm_zos_core/pull/1810). + - zos_copy - Adds error message when a PDS/E source member does not exist or + is not cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/1821). 
+ - zos_job_submit - Add deploy and forget capability. Now when wait_time_s is + 0, the module will submit the job and will not wait to get the job details + or content, returning only the job id. (https://github.com/ansible-collections/ibm_zos_core/pull/1746). + - zos_job_submit - Added new option ``autoescape`` to ``template_parameters``, + allowing users to disable autoescaping of common XML/HTML characters when + working with Jinja templates. (https://github.com/ansible-collections/ibm_zos_core/pull/1810). + - zos_job_submit - Added support to run zos_job_submit tasks in async mode inside + playbooks. (https://github.com/ansible-collections/ibm_zos_core/pull/1786). + - zos_mvs_raw - Added ``max_rc`` option. Now when the user sets ``max_rc``, + the module tolerates the failure if the return code is smaller than the ``max_rc`` + specified, however, return value ``changed`` will be False if the program + return code is not 0. (https://github.com/ansible-collections/ibm_zos_core/pull/1813). + - zos_script - Added new option ``autoescape`` to ``template_parameters``, allowing + users to disable autoescaping of common XML/HTML characters when working with + Jinja templates. (https://github.com/ansible-collections/ibm_zos_core/pull/1810). 
+ release_summary: "Release Date: '2025-01-30' This changelog describes all changes made to the modules and plugins included @@ -641,55 +641,55 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1744-zos_copy-racf-uacc-updates.yml - - 1746-deploy-forget-zos_job_submit.yml - - 1750-docs-gdg-archive.yml - - 1751-Avoid_parsing_on_job_utils.yml - - 1756-update-docstrings-zos_unarchive.yml - - 1759-system-subsystem-job_query.yml - - 1769_handle_relative_path.yml - - 1772-fix-testing-ansible-2.17.1.yml - - 1773-fix_absent_inheritance.yml - - 1774-zos_mvs_raw-verbose-fail.yml - - 1775-zos_mvs_raw-size-tests.yml - - 1784-ac-added-flake8-collections-mapping.yml - - 1786-zos_job_submit-async-support.yml - - 1790-restricted-hlq-user-test.yml - - 1794-zos_mvs_raw_stdout-replaced-by-stderr.yml - - 1804_enhance_error_message_when_failing_import.yml - - 1808-zos_mvs_raw-stderr-not-returned.yml - - 1810-autoescape-templates.yml - - 1813-implement_max_rc_mvs_raw.yml - - 1816-migrate_shell_call_cp_to_python_module.yml - - 1821-Add_error_message.yml - - 1822-zos_find-vsam-disp-old.yml - - 1827-ac-script-update.yml - - 1831-replace-copy-commands-copy-util.yml - - 1839-volume_init_test_portability.yml - - 1842-Add_test_case_to_validate_advance_regular_expression.yml - - 1847-zos_mvs_raw-tmphlq-as-backup-hlq.yml - - 1851-remove_hard_coded_python_path.yml - - 1852-zos_script-permissions.yml - - 1890-zos_copy-test-suite.yml - - v1.13.0-beta.1_summary.yml + - 1744-zos_copy-racf-uacc-updates.yml + - 1746-deploy-forget-zos_job_submit.yml + - 1750-docs-gdg-archive.yml + - 1751-Avoid_parsing_on_job_utils.yml + - 1756-update-docstrings-zos_unarchive.yml + - 1759-system-subsystem-job_query.yml + - 1769_handle_relative_path.yml + - 1772-fix-testing-ansible-2.17.1.yml + - 1773-fix_absent_inheritance.yml + - 1774-zos_mvs_raw-verbose-fail.yml + - 
1775-zos_mvs_raw-size-tests.yml + - 1784-ac-added-flake8-collections-mapping.yml + - 1786-zos_job_submit-async-support.yml + - 1790-restricted-hlq-user-test.yml + - 1794-zos_mvs_raw_stdout-replaced-by-stderr.yml + - 1804_enhance_error_message_when_failing_import.yml + - 1808-zos_mvs_raw-stderr-not-returned.yml + - 1810-autoescape-templates.yml + - 1813-implement_max_rc_mvs_raw.yml + - 1816-migrate_shell_call_cp_to_python_module.yml + - 1821-Add_error_message.yml + - 1822-zos_find-vsam-disp-old.yml + - 1827-ac-script-update.yml + - 1831-replace-copy-commands-copy-util.yml + - 1839-volume_init_test_portability.yml + - 1842-Add_test_case_to_validate_advance_regular_expression.yml + - 1847-zos_mvs_raw-tmphlq-as-backup-hlq.yml + - 1851-remove_hard_coded_python_path.yml + - 1852-zos_script-permissions.yml + - 1890-zos_copy-test-suite.yml + - v1.13.0-beta.1_summary.yml modules: - - description: Resize a zfs data set. - name: zos_zfs_resize - namespace: '' - release_date: '2025-01-30' + - description: Resize a zfs data set. + name: zos_zfs_resize + namespace: "" + release_date: "2025-01-30" 1.14.0: changes: bugfixes: - - zos_copy - Previously, when trying to copy into remote and ansible's default - temporary directory was not created before execution the copy task would fail. - Fix now creates the temporary directory if possible. (https://github.com/ansible-collections/ibm_zos_core/pull/2109) - - zos_job_submit - Previously, the use of `become` would result in a permissions - error while trying to execute a job from a local file. Fix now allows a user - to escalate privileges when executing a job transferred from the controller - node. (https://github.com/ansible-collections/ibm_zos_core/pull/2109) - release_summary: 'Release Date: ''2025-06-30'' + - zos_copy - Previously, when trying to copy into remote and ansible's default + temporary directory was not created before execution the copy task would fail. + Fix now creates the temporary directory if possible. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2109) + - zos_job_submit - Previously, the use of `become` would result in a permissions + error while trying to execute a job from a local file. Fix now allows a user + to escalate privileges when executing a job transferred from the controller + node. (https://github.com/ansible-collections/ibm_zos_core/pull/2109) + release_summary: "Release Date: '2025-06-30' This changelog describes all changes made to the modules and plugins included @@ -697,77 +697,77 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 2109-copy-fix-tmp-dir.yml - - v1.14.0_summary.yml - release_date: '2025-06-24' + - 2109-copy-fix-tmp-dir.yml + - v1.14.0_summary.yml + release_date: "2025-06-24" 1.14.0-beta.1: changes: bugfixes: - - zos_apf - When trying to add a library into the APF list that was already - added, the module would fail. Fix now will not fail the module, and will inform - the user that the library is already on the APF list. (https://github.com/ansible-collections/ibm_zos_core/pull/1893) - - zos_copy - Previously, if the dataset name included special characters such - as $, validation would fail when force_lock was false. This has been changed - to allow the use of special characters when force_lock option is false. (https://github.com/ansible-collections/ibm_zos_core/pull/1908) - - zos_copy - When ``asa_text`` was set to true at the same time as ``force_lock``, - a copy would fail saying the destination was already in use. Fix now opens - destination data sets up with disposition SHR when ``force_lock`` and ``asa_text`` - are set to true. (https://github.com/ansible-collections/ibm_zos_core/pull/1941). - - zos_copy - the carriage return characters were being removed from only first - 1024 bytes of a file. 
Now fixed that issue to support removal of the carriage - return characters from the complete file content if the file size is more - than 1024 bytes. (https://github.com/ansible-collections/ibm_zos_core/pull/1954). - - zos_data_set - Module would fail when trying to delete a non-existent Generation - Data Group. Fix now provides a successful response with `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/1971). - - zos_data_set - Module would fail with TypeError when trying to replace an - existing GDG. Fix now allows to replacing a GDG. (https://github.com/ansible-collections/ibm_zos_core/pull/1964). - - zos_job_output - When searching for a job name, module performed a '*' (find - all), then filtered the results. Fix now asks for specific job name, making - the return faster and more precise. (https://github.com/ansible-collections/ibm_zos_core/pull/1916). - - zos_job_query - When searching for a job name, module performed a '*' (find - all), then filtered the results. Fix now asks for specific job name, making - the return faster and more precise. (https://github.com/ansible-collections/ibm_zos_core/pull/1916). - - zos_job_submit - When searching for a job name, module performed a '*' (find - all), then filtered the results. Fix now asks for specific job name, making - the return faster and more precise. (https://github.com/ansible-collections/ibm_zos_core/pull/1916). - - zos_mount - Module failed when using persistent option with a data set that - contains non UTF-8 characters. Fix now can use a data set with non UTF-8 characters - as data_store. (https://github.com/ansible-collections/ibm_zos_core/pull/1871). + - zos_apf - When trying to add a library into the APF list that was already + added, the module would fail. Fix now will not fail the module, and will inform + the user that the library is already on the APF list. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1893) + - zos_copy - Previously, if the dataset name included special characters such + as $, validation would fail when force_lock was false. This has been changed + to allow the use of special characters when force_lock option is false. (https://github.com/ansible-collections/ibm_zos_core/pull/1908) + - zos_copy - When ``asa_text`` was set to true at the same time as ``force_lock``, + a copy would fail saying the destination was already in use. Fix now opens + destination data sets up with disposition SHR when ``force_lock`` and ``asa_text`` + are set to true. (https://github.com/ansible-collections/ibm_zos_core/pull/1941). + - zos_copy - the carriage return characters were being removed from only first + 1024 bytes of a file. Now fixed that issue to support removal of the carriage + return characters from the complete file content if the file size is more + than 1024 bytes. (https://github.com/ansible-collections/ibm_zos_core/pull/1954). + - zos_data_set - Module would fail when trying to delete a non-existent Generation + Data Group. Fix now provides a successful response with `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/1971). + - zos_data_set - Module would fail with TypeError when trying to replace an + existing GDG. Fix now allows to replacing a GDG. (https://github.com/ansible-collections/ibm_zos_core/pull/1964). + - zos_job_output - When searching for a job name, module performed a '*' (find + all), then filtered the results. Fix now asks for specific job name, making + the return faster and more precise. (https://github.com/ansible-collections/ibm_zos_core/pull/1916). + - zos_job_query - When searching for a job name, module performed a '*' (find + all), then filtered the results. Fix now asks for specific job name, making + the return faster and more precise. (https://github.com/ansible-collections/ibm_zos_core/pull/1916). 
+ - zos_job_submit - When searching for a job name, module performed a '*' (find + all), then filtered the results. Fix now asks for specific job name, making + the return faster and more precise. (https://github.com/ansible-collections/ibm_zos_core/pull/1916). + - zos_mount - Module failed when using persistent option with a data set that + contains non UTF-8 characters. Fix now can use a data set with non UTF-8 characters + as data_store. (https://github.com/ansible-collections/ibm_zos_core/pull/1871). minor_changes: - - zos_copy - Adds ``large`` as a choice for ``type`` in ``dest_data_set``. (https://github.com/ansible-collections/ibm_zos_core/pull/1938) - - zos_copy - Adds logging of Jinja rendered template content when `use_template` - is true and verbosity level `-vvv` is used. (https://github.com/ansible-collections/ibm_zos_core/pull/1968). - - zos_copy - Adds support for copying in asynchronous mode inside playbooks. - (https://github.com/ansible-collections/ibm_zos_core/pull/1953). - - zos_copy - Removes the need to allow READ access to MVS.MCSOPER.ZOAU to execute - the module by changing how the module checks if a data set is locked. (https://github.com/ansible-collections/ibm_zos_core/pull/1917). - - zos_job_output - Add execution_time return value in the modules response. - (https://github.com/ansible-collections/ibm_zos_core/pull/1891). - - zos_job_query - Add execution_time return value in the modules response. (https://github.com/ansible-collections/ibm_zos_core/pull/1891). - - zos_job_query - Loads correct bytes size value for dds when using zoau 1.3.4 - or later (https://github.com/ansible-collections/ibm_zos_core/pull/1868). - - zos_job_query - System and Subsystem are now retrieved from JES. (https://github.com/ansible-collections/ibm_zos_core/pull/1900). - - zos_job_submit - Adds logging of Jinja rendered template content when `use_template` - is true and verbosity level `-vvv` is used. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1962). - - zos_job_submit - Add execution_time return value in the modules response. - (https://github.com/ansible-collections/ibm_zos_core/pull/1891). - - zos_job_submit - Loads correct bytes size value for dds when using zoau 1.3.4 - or later (https://github.com/ansible-collections/ibm_zos_core/pull/1868). - - zos_script - Adds error message for when remote source does not exist. (https://github.com/ansible-collections/ibm_zos_core/pull/1894). - - zos_script - Adds logging of Jinja rendered template content when `use_template` - is true and verbosity level `-vvv` is used. (https://github.com/ansible-collections/ibm_zos_core/pull/1968). - - zos_script - Adds support for running local and remote scripts in asynchronous - mode inside playbooks. (https://github.com/ansible-collections/ibm_zos_core/pull/1934). - - zos_script - Support automatic removal of carriage return line breaks [CR, - CRLF] when copying local files to USS. (https://github.com/ansible-collections/ibm_zos_core/pull/1954). - - zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2048) - - zos_unarchive - Adds support for unarchiving files in asynchronous mode inside - playbooks. (https://github.com/ansible-collections/ibm_zos_core/pull/2020). - - zos_zfs_resize - Adds validations for trace destination dataset used for trace - verbose. (https://github.com/ansible-collections/ibm_zos_core/pull/1897). - release_summary: 'Release Date: ''2025-04-30'' + - zos_copy - Adds ``large`` as a choice for ``type`` in ``dest_data_set``. (https://github.com/ansible-collections/ibm_zos_core/pull/1938) + - zos_copy - Adds logging of Jinja rendered template content when `use_template` + is true and verbosity level `-vvv` is used. (https://github.com/ansible-collections/ibm_zos_core/pull/1968). + - zos_copy - Adds support for copying in asynchronous mode inside playbooks. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1953). + - zos_copy - Removes the need to allow READ access to MVS.MCSOPER.ZOAU to execute + the module by changing how the module checks if a data set is locked. (https://github.com/ansible-collections/ibm_zos_core/pull/1917). + - zos_job_output - Add execution_time return value in the modules response. + (https://github.com/ansible-collections/ibm_zos_core/pull/1891). + - zos_job_query - Add execution_time return value in the modules response. (https://github.com/ansible-collections/ibm_zos_core/pull/1891). + - zos_job_query - Loads correct bytes size value for dds when using zoau 1.3.4 + or later (https://github.com/ansible-collections/ibm_zos_core/pull/1868). + - zos_job_query - System and Subsystem are now retrieved from JES. (https://github.com/ansible-collections/ibm_zos_core/pull/1900). + - zos_job_submit - Adds logging of Jinja rendered template content when `use_template` + is true and verbosity level `-vvv` is used. (https://github.com/ansible-collections/ibm_zos_core/pull/1962). + - zos_job_submit - Add execution_time return value in the modules response. + (https://github.com/ansible-collections/ibm_zos_core/pull/1891). + - zos_job_submit - Loads correct bytes size value for dds when using zoau 1.3.4 + or later (https://github.com/ansible-collections/ibm_zos_core/pull/1868). + - zos_script - Adds error message for when remote source does not exist. (https://github.com/ansible-collections/ibm_zos_core/pull/1894). + - zos_script - Adds logging of Jinja rendered template content when `use_template` + is true and verbosity level `-vvv` is used. (https://github.com/ansible-collections/ibm_zos_core/pull/1968). + - zos_script - Adds support for running local and remote scripts in asynchronous + mode inside playbooks. (https://github.com/ansible-collections/ibm_zos_core/pull/1934). + - zos_script - Support automatic removal of carriage return line breaks [CR, + CRLF] when copying local files to USS. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1954). + - zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2048) + - zos_unarchive - Adds support for unarchiving files in asynchronous mode inside + playbooks. (https://github.com/ansible-collections/ibm_zos_core/pull/2020). + - zos_zfs_resize - Adds validations for trace destination dataset used for trace + verbose. (https://github.com/ansible-collections/ibm_zos_core/pull/1897). + release_summary: "Release Date: '2025-04-30' This changelog describes all changes made to the modules and plugins included @@ -775,74 +775,75 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1389-update-docstring-data_set.yml - - 1508-fix-ddsl-size.yml - - 1865-fix-sanity-2.18-encode.yml - - 1866-fix-sanity-2.18-zos_fetch.yml - - 1871-Failing_using_persistent_option.yml - - 1873-zos_copy-test-suite.yml - - 1891-zos_job_submit-include-execution_time.yml - - 1893-zos_apf-fails-for-already-existing-file.yml - - 1894-Gracefully_fail_when_remote_src_does_not_exist.yml - - 1897-check_attributes_of_trace_file.yml - - 1900-zos_job_query-map_system_subsystem.yml - - 1908-wrong_behaviour_force_lock_parameter.yml - - 1915-fix-conftest.yml - - 1916-job-status-query-on-job-name.yml - - 1917-Opercmd_usage_for_disposition.yml - - 1925-Deprecation-warning-zos-fetch-and-zos-job-submit.yml - - 1927-job_update-system-check.yml - - 1933-zos_blockinfile-false_negative_double_quotes.yml - - 1934-zos_script-implemented-async-support.yml - - 1938-add-large-type-zos_copy.yml - - 1941-zos_copy-asa-dest-lock-support.yml - - 1945-Hardcode-volume-values-zos_volume_init .yml - - 1953-zos_copy_implemented_async_support.yml - - 1954-zos_copy-cr-removal-while-copying-file-to-remote.yml - - 1956-zoau-version-support-for-test-cases.yml - - 
1962-zos_job_submit-log-generated.yml - - 1963-support-attributes-docs.yml - - 1964-zos_data_set-fixing-gdg-replace-issue.yml - - 1968-zos_script-log-generated.yml - - 1969-Update_documentation_about_alias_and_executable_on_copy.yml - - 1970-update-zos_copy-wildcard.yml - - 1971-zos_data_set-fixing-gdg-delete-issue.yml - - 1986-add-retry-dest-lock-tests.yml - - 1987-async_documentation.yml - - 1992-zos_data_set-enable-by-default-attributes.yml - - 2000-documentation-for-check_mode-all-modules.yml - - 2020-zos-unarchive_async_support.yml - - 2025-Add_documentation_of_false_negatives_and_ensure_proper_testing.yml - - 2026-Fix_encoding_for_testing.yml - - 2028-add-refresh-command-ac-tool.yml - - 2048-alias-support-zos_stat.yml - - v1.14.0-beta.1_summary.yml + - 1389-update-docstring-data_set.yml + - 1508-fix-ddsl-size.yml + - 1865-fix-sanity-2.18-encode.yml + - 1866-fix-sanity-2.18-zos_fetch.yml + - 1871-Failing_using_persistent_option.yml + - 1873-zos_copy-test-suite.yml + - 1891-zos_job_submit-include-execution_time.yml + - 1893-zos_apf-fails-for-already-existing-file.yml + - 1894-Gracefully_fail_when_remote_src_does_not_exist.yml + - 1897-check_attributes_of_trace_file.yml + - 1900-zos_job_query-map_system_subsystem.yml + - 1908-wrong_behaviour_force_lock_parameter.yml + - 1915-fix-conftest.yml + - 1916-job-status-query-on-job-name.yml + - 1917-Opercmd_usage_for_disposition.yml + - 1925-Deprecation-warning-zos-fetch-and-zos-job-submit.yml + - 1927-job_update-system-check.yml + - 1933-zos_blockinfile-false_negative_double_quotes.yml + - 1934-zos_script-implemented-async-support.yml + - 1938-add-large-type-zos_copy.yml + - 1941-zos_copy-asa-dest-lock-support.yml + - 1945-Hardcode-volume-values-zos_volume_init .yml + - 1953-zos_copy_implemented_async_support.yml + - 1954-zos_copy-cr-removal-while-copying-file-to-remote.yml + - 1956-zoau-version-support-for-test-cases.yml + - 1962-zos_job_submit-log-generated.yml + - 1963-support-attributes-docs.yml + - 
1964-zos_data_set-fixing-gdg-replace-issue.yml + - 1968-zos_script-log-generated.yml + - 1969-Update_documentation_about_alias_and_executable_on_copy.yml + - 1970-update-zos_copy-wildcard.yml + - 1971-zos_data_set-fixing-gdg-delete-issue.yml + - 1986-add-retry-dest-lock-tests.yml + - 1987-async_documentation.yml + - 1992-zos_data_set-enable-by-default-attributes.yml + - 2000-documentation-for-check_mode-all-modules.yml + - 2020-zos-unarchive_async_support.yml + - 2025-Add_documentation_of_false_negatives_and_ensure_proper_testing.yml + - 2026-Fix_encoding_for_testing.yml + - 2028-add-refresh-command-ac-tool.yml + - 2048-alias-support-zos_stat.yml + - v1.14.0-beta.1_summary.yml modules: - - description: Retrieve facts from MVS data sets, USS files, aggregates and generation - data groups - name: zos_stat - namespace: '' - release_date: '2025-04-25' + - description: + Retrieve facts from MVS data sets, USS files, aggregates and generation + data groups + name: zos_stat + namespace: "" + release_date: "2025-04-25" 1.14.1: changes: bugfixes: - - zos_copy - Previously, if the ansible user was not a superuser copying a file - into the managed node resulted in permission denied error. Fix now sets the - correct permissions for the ansible user for copying to the remote. (https://github.com/ansible-collections/ibm_zos_core/pull/2196) - - zos_job_submit - Previously, if the ansible user was not a superuser copying - a file into the managed node resulted in permission denied error. Fix now - sets the correct permissions for the ansible user for copying to the remote. - (https://github.com/ansible-collections/ibm_zos_core/pull/2196) - - zos_script - Previously, if the ansible user was not a superuser copying a - file into the managed node resulted in permission denied error. Fix now sets - the correct permissions for the ansible user for copying to the remote. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2196) - - zos_unarchive - Previously, if the ansible user was not a superuser copying - a file into the managed node resulted in permission denied error. Fix now - sets the correct permissions for the ansible user for copying to the remote. - (https://github.com/ansible-collections/ibm_zos_core/pull/2196) - release_summary: 'Release Date: ''2025-07-03'' + - zos_copy - Previously, if the ansible user was not a superuser copying a file + into the managed node resulted in permission denied error. Fix now sets the + correct permissions for the ansible user for copying to the remote. (https://github.com/ansible-collections/ibm_zos_core/pull/2196) + - zos_job_submit - Previously, if the ansible user was not a superuser copying + a file into the managed node resulted in permission denied error. Fix now + sets the correct permissions for the ansible user for copying to the remote. + (https://github.com/ansible-collections/ibm_zos_core/pull/2196) + - zos_script - Previously, if the ansible user was not a superuser copying a + file into the managed node resulted in permission denied error. Fix now sets + the correct permissions for the ansible user for copying to the remote. (https://github.com/ansible-collections/ibm_zos_core/pull/2196) + - zos_unarchive - Previously, if the ansible user was not a superuser copying + a file into the managed node resulted in permission denied error. Fix now + sets the correct permissions for the ansible user for copying to the remote. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2196) + release_summary: "Release Date: '2025-07-03' This changelog describes all changes made to the modules and plugins included @@ -850,14 +851,14 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 2196-fix-copy-permission-issues.yml - - v1.14.1_summary.yml - release_date: '2025-07-01' + - 2196-fix-copy-permission-issues.yml + - v1.14.1_summary.yml + release_date: "2025-07-01" 1.15.0: changes: - release_summary: 'Release Date: ''2025-09-30'' + release_summary: "Release Date: '2025-09-30' This changelog describes all changes made to the modules and plugins included @@ -865,74 +866,74 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - v1.15.0_summary.yml - release_date: '2025-09-22' + - v1.15.0_summary.yml + release_date: "2025-09-22" 1.15.0-beta.1: changes: bugfixes: - - zos_backup_restore - Return value `backup_name` was empty upon successful - result. Fix now returns `backup_name` populated. (https://github.com/ansible-collections/ibm_zos_core/pull/2040). - - zos_data_set - Attempting to create a data set with the same name on a different - volume did not work, nor did it report a failure. The fix now informs the - user that if the data set is cataloged on a different volume, it needs to - be uncataloged before using the data set module to create a new data set on - a different volume. (https://github.com/ansible-collections/ibm_zos_core/pull/2057). - - zos_fetch - Previously, the use of `become` would result in a permissions - error while trying to fetch a data set or a member. Fix now allows a user - to escalate privileges when fetching resources. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2079) - - zos_lineinfile - Return values ``return_content`` and ``backup_name`` were - not always being returned. Fix now ensure that these values are always present - in the module's response. (https://github.com/ansible-collections/ibm_zos_core/pull/2120) - - zos_lineinfile - The module would report a false negative when certain special - characters where present in the `line` option. Fix now reports the successful - operation. (https://github.com/ansible-collections/ibm_zos_core/pull/2080). - - zos_mount - FSUMF168 return in stderror means that the mount dataset wouldn't - resolve. While this shows a catalog or volume issue, it should not impact - our search for an existing mount. Added handling to the df call, so that FSUMF168 - are ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/2060). + - zos_backup_restore - Return value `backup_name` was empty upon successful + result. Fix now returns `backup_name` populated. (https://github.com/ansible-collections/ibm_zos_core/pull/2040). + - zos_data_set - Attempting to create a data set with the same name on a different + volume did not work, nor did it report a failure. The fix now informs the + user that if the data set is cataloged on a different volume, it needs to + be uncataloged before using the data set module to create a new data set on + a different volume. (https://github.com/ansible-collections/ibm_zos_core/pull/2057). + - zos_fetch - Previously, the use of `become` would result in a permissions + error while trying to fetch a data set or a member. Fix now allows a user + to escalate privileges when fetching resources. (https://github.com/ansible-collections/ibm_zos_core/pull/2079) + - zos_lineinfile - Return values ``return_content`` and ``backup_name`` were + not always being returned. Fix now ensures that these values are always present + in the module's response. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2120) + - zos_lineinfile - The module would report a false negative when certain special + characters were present in the `line` option. Fix now reports the successful + operation. (https://github.com/ansible-collections/ibm_zos_core/pull/2080). + - zos_mount - FSUMF168 return in stderror means that the mount dataset wouldn't + resolve. While this shows a catalog or volume issue, it should not impact + our search for an existing mount. Added handling to the df call, so that FSUMF168 + are ignored. (https://github.com/ansible-collections/ibm_zos_core/pull/2060). minor_changes: - - zos_archive - Adds support for encoding before archiving files. (https://github.com/ansible-collections/ibm_zos_core/pull/2081) - - zos_archive - Adds support for reverting the encoding of a source's files - after archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2192) - - zos_archive - Adds support for skipping encoding in archive module. This allows - users to skip encoding for certain files before archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2116) - - zos_copy - Added support for british pound character usage in file content - and data set names for both source and destination when copying. (https://github.com/ansible-collections/ibm_zos_core/pull/2153) - - zos_copy - Adds new option `identical_gdg_copy` in the module. This allows - copying GDG generations from a source base to a destination base while preserving - generation data set absolute names when the destination base does not exist - prior to the copy. (https://github.com/ansible-collections/ibm_zos_core/pull/2100). - - zos_copy - Adds support of using alias names in src and dest parameters for - PS, PDS and PDSE data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/2103) - - zos_fetch - Updated the documentation to correctly state what the default - behavior of the module is. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2047). - - zos_find - Adds functionality to find migrated data sets. - Adds functionality - to find different types of data sets at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/2073). - - zos_job_output - Adds new fields cpu_time, origin_node and execution_node - to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). - - zos_job_query - Adds new fields cpu_time, origin_node and execution_node to - response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). - - zos_job_submit - Adds new fields cpu_time, origin_node and execution_node - to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). - - zos_mvs_raw - Before this addition, you could not put anything in columns - 1 or 2, were reserved for JCL processing. Change now allows add reserved_cols - option and validate that the module get access to modify dd_content option - base on the value, if not retain the previous behavior or work. (https://github.com/ansible-collections/ibm_zos_core/pull/2086) - - zos_mvs_raw - Adds support for volume data definition. (https://github.com/ansible-collections/ibm_zos_core/pull/2194) - - zos_stat - Added support to recall migrated data sets and return its attributes. - (https://github.com/ansible-collections/ibm_zos_core/pull/2075) - - zos_stat - Adds new fields that describe the type of the resource that was - queried. These new fields are `isfile`, `isdataset`, `isaggregate` and `isgdg`. - (https://github.com/ansible-collections/ibm_zos_core/pull/2137) - - zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2061) - - zos_stat - Module now returns whether the resource queried exists on the managed - node with the `exists` field inside `stat`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/2137) - - zos_unarchive - Added encoding support for the unarchive module. This allows - users to encode the files after unarchiving them in a perticular encoding. - (https://github.com/ansible-collections/ibm_zos_core/pull/2105) - release_summary: 'Release Date: ''2025-07-30'' + - zos_archive - Adds support for encoding before archiving files. (https://github.com/ansible-collections/ibm_zos_core/pull/2081) + - zos_archive - Adds support for reverting the encoding of a source's files + after archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2192) + - zos_archive - Adds support for skipping encoding in archive module. This allows + users to skip encoding for certain files before archiving them. (https://github.com/ansible-collections/ibm_zos_core/pull/2116) + - zos_copy - Added support for british pound character usage in file content + and data set names for both source and destination when copying. (https://github.com/ansible-collections/ibm_zos_core/pull/2153) + - zos_copy - Adds new option `identical_gdg_copy` in the module. This allows + copying GDG generations from a source base to a destination base while preserving + generation data set absolute names when the destination base does not exist + prior to the copy. (https://github.com/ansible-collections/ibm_zos_core/pull/2100). + - zos_copy - Adds support of using alias names in src and dest parameters for + PS, PDS and PDSE data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/2103) + - zos_fetch - Updated the documentation to correctly state what the default + behavior of the module is. (https://github.com/ansible-collections/ibm_zos_core/pull/2047). + - zos_find - Adds functionality to find migrated data sets. - Adds functionality + to find different types of data sets at the same time. (https://github.com/ansible-collections/ibm_zos_core/pull/2073). 
+ - zos_job_output - Adds new fields cpu_time, origin_node and execution_node + to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). + - zos_job_query - Adds new fields cpu_time, origin_node and execution_node to + response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). + - zos_job_submit - Adds new fields cpu_time, origin_node and execution_node + to response. (https://github.com/ansible-collections/ibm_zos_core/pull/2056). + - zos_mvs_raw - Before this addition, you could not put anything in columns + 1 or 2, were reserved for JCL processing. Change now allows add reserved_cols + option and validate that the module get access to modify dd_content option + base on the value, if not retain the previous behavior or work. (https://github.com/ansible-collections/ibm_zos_core/pull/2086) + - zos_mvs_raw - Adds support for volume data definition. (https://github.com/ansible-collections/ibm_zos_core/pull/2194) + - zos_stat - Added support to recall migrated data sets and return its attributes. + (https://github.com/ansible-collections/ibm_zos_core/pull/2075) + - zos_stat - Adds new fields that describe the type of the resource that was + queried. These new fields are `isfile`, `isdataset`, `isaggregate` and `isgdg`. + (https://github.com/ansible-collections/ibm_zos_core/pull/2137) + - zos_stat - Adds support to query data sets using their aliases. (https://github.com/ansible-collections/ibm_zos_core/pull/2061) + - zos_stat - Module now returns whether the resource queried exists on the managed + node with the `exists` field inside `stat`. (https://github.com/ansible-collections/ibm_zos_core/pull/2137) + - zos_unarchive - Added encoding support for the unarchive module. This allows + users to encode the files after unarchiving them in a particular encoding. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2105) + release_summary: "Release Date: '2025-07-30' This changelog describes all changes made to the modules and plugins included @@ -942,60 +943,60 @@ releases: the collections `release notes ` - ' + " fragments: - - 2030-Set_dynamic_volumes_for_volume_init.yml - - 2033-remove-dev-tools.yml - - 2039-documentation-zos_copy-opercmd.yml - - 2040-zos_backup_restore-fixed_return_backup_name.yml - - 2043-zos_mount-volume-size-resize.yml - - 2047-zos_fetch-update-docs.yml - - 2049-zos_backup_restore-added-return-values-in-doc.yml - - 2055-Zos_apf-shell-commands-to-api.yml - - 2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml - - 2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml - - 2058-Add_sanity_ignore_for_2_18.yml - - 2059-Github-sanity-2-18-fix.yml - - 2060-zos_mount-skip_fsumf168_from_df.yml - - 2061-alias-support-zos_stat.yml - - 2068-zos_data_set-Removed-Extra-Validation.yml - - 2073-zos_find-finding-migrated-datasets.yml - - 2075-migrated-data-sets-support-zos_stat.yml - - 2079-become-use-zos_fetch.yml - - 2080-zos_lineinfile-fixed-json-parsing.yml - - 2081-zos_archive-add-encoding-support.yml - - 2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml - - 2098-docs-migrated-data-sets-examples.yml - - 2100-zos_copy-Identical-gdg-copy-support.yml - - 2103-zos_copy-supporting-aliases-for-src-and-dest.yml - - 2105-zos_unarchive-encoding-support.yml - - 2111-update-zos_copy-block-size-docs.yml - - 2116-zos_archive-skip_encoding.yml - - 2120-zos_lineinfile-Added-return-content.yml - - 2135-Test_case_to_check_tmphlq_zos_backup_restore.yml - - 2136-zos_unarchive_skip_encoding_support.yml - - 2137-zos_stat-new-fields.yml - - 2138-Allow_run_dependency_finder_with_other_command.yml - - 2153-zos_copy-supporting-pound-in-dataset-name-and content.yml - - 2192-zos_archive-revert_src_encoding.yml - - 2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml - 
- v1.15.0-beta.1_summary.yml + - 2030-Set_dynamic_volumes_for_volume_init.yml + - 2033-remove-dev-tools.yml + - 2039-documentation-zos_copy-opercmd.yml + - 2040-zos_backup_restore-fixed_return_backup_name.yml + - 2043-zos_mount-volume-size-resize.yml + - 2047-zos_fetch-update-docs.yml + - 2049-zos_backup_restore-added-return-values-in-doc.yml + - 2055-Zos_apf-shell-commands-to-api.yml + - 2056-zos_job-modules-adding-cpu_time-execution_node-origin_node.yml + - 2057-zos_data_set&data_set-Ensure-Dataset-Volume-Validation.yml + - 2058-Add_sanity_ignore_for_2_18.yml + - 2059-Github-sanity-2-18-fix.yml + - 2060-zos_mount-skip_fsumf168_from_df.yml + - 2061-alias-support-zos_stat.yml + - 2068-zos_data_set-Removed-Extra-Validation.yml + - 2073-zos_find-finding-migrated-datasets.yml + - 2075-migrated-data-sets-support-zos_stat.yml + - 2079-become-use-zos_fetch.yml + - 2080-zos_lineinfile-fixed-json-parsing.yml + - 2081-zos_archive-add-encoding-support.yml + - 2086_Programs_fails_when_need_access_to_the_first_and_second_columns.yml + - 2098-docs-migrated-data-sets-examples.yml + - 2100-zos_copy-Identical-gdg-copy-support.yml + - 2103-zos_copy-supporting-aliases-for-src-and-dest.yml + - 2105-zos_unarchive-encoding-support.yml + - 2111-update-zos_copy-block-size-docs.yml + - 2116-zos_archive-skip_encoding.yml + - 2120-zos_lineinfile-Added-return-content.yml + - 2135-Test_case_to_check_tmphlq_zos_backup_restore.yml + - 2136-zos_unarchive_skip_encoding_support.yml + - 2137-zos_stat-new-fields.yml + - 2138-Allow_run_dependency_finder_with_other_command.yml + - 2153-zos_copy-supporting-pound-in-dataset-name-and content.yml + - 2192-zos_archive-revert_src_encoding.yml + - 2194-zos_mvs_raw-Supporting-for-volume-definition-in-zos_mvs_raw.yml + - v1.15.0-beta.1_summary.yml modules: - - description: Replace all instances of a pattern within a file or data set. 
- name: zos_replace - namespace: '' - release_date: '2025-08-05' + - description: Replace all instances of a pattern within a file or data set. + name: zos_replace + namespace: "" + release_date: "2025-08-05" 1.2.1: changes: bugfixes: - - zos_copy - fixed regex support, dictionary merge operation fix - - zos_encode - removed TemporaryDirectory usage. - - zos_fetch - fix quote import + - zos_copy - fixed regex support, dictionary merge operation fix + - zos_encode - removed TemporaryDirectory usage. + - zos_fetch - fix quote import minor_changes: - - Documentation related to configuration has been migrated to the `playbook - repository `__ - - Python 2.x support - release_summary: 'Release Date: ''2020-10-09'' + - Documentation related to configuration has been migrated to the `playbook + repository `__ + - Python 2.x support + release_summary: "Release Date: '2020-10-09' This changlelog describes all changes made to the modules and plugins included @@ -1008,53 +1009,54 @@ releases: Beginning this release, all playbooks previously included with the collection - will be made available on the `playbook repository `__.' + will be made available on the `playbook repository `__." fragments: - - v1.2.1_summary.yml - - v1.2.1_summary_bugs.yml - - v1.2.1_summary_minor.yml + - v1.2.1_summary.yml + - v1.2.1_summary_bugs.yml + - v1.2.1_summary_minor.yml modules: - - description: Manage textual data on z/OS - name: zos_lineinfile - namespace: '' - release_date: '2022-06-07' + - description: Manage textual data on z/OS + name: zos_lineinfile + namespace: "" + release_date: "2022-06-07" 1.3.0: changes: bugfixes: - - Action plugin zos_copy was updated to support Python 2.7. - - Job utility is an internal library used by several modules. It has been updated - to use a custom written parsing routine capable of handling special characters - to prevent job related reading operations from failing when a special character - is encountered. 
- - Module zos_copy was updated to fail gracefully when a it encounters a non-zero - return code. - - Module zos_copy was updated to support copying data set members that are program - objects to a PDSE. Prior to this update, copying data set members would yield - an error; - FSUM8976 Error writing to PDSE member - - Module zos_job_submit referenced a non-existent option and was corrected to - **wait_time_s**. - - Module zos_job_submit was updated to remove all trailing **\r** from jobs - that are submitted from the controller. - - Module zos_tso_command support was added for when the command output contained - special characters. - - Playbook zos_operator_basics.yaml has been updated to use end in the WTO reply - over the previous use of cancel. Using cancel is not a valid reply and results - in an execution error. + - Action plugin zos_copy was updated to support Python 2.7. + - Job utility is an internal library used by several modules. It has been updated + to use a custom written parsing routine capable of handling special characters + to prevent job related reading operations from failing when a special character + is encountered. + - Module zos_copy was updated to fail gracefully when a it encounters a non-zero + return code. + - Module zos_copy was updated to support copying data set members that are program + objects to a PDSE. Prior to this update, copying data set members would yield + an error; - FSUM8976 Error writing to PDSE member + - Module zos_job_submit referenced a non-existent option and was corrected to + **wait_time_s**. + - Module zos_job_submit was updated to remove all trailing **\r** from jobs + that are submitted from the controller. + - Module zos_tso_command support was added for when the command output contained + special characters. + - Playbook zos_operator_basics.yaml has been updated to use end in the WTO reply + over the previous use of cancel. Using cancel is not a valid reply and results + in an execution error. 
known_issues: - - When executing programs using zos_mvs_raw, you may encounter errors that originate - in the implementation of the programs. Two such known issues are noted below - of which one has been addressed with an APAR. - zos_mvs_raw module execution - fails when invoking Database Image Copy 2 Utility or Database Recovery Utility - in conjunction with FlashCopy or Fast Replication. - zos_mvs_raw module execution - fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". - This issue is addressed by APAR PH28089. + - When executing programs using zos_mvs_raw, you may encounter errors that originate + in the implementation of the programs. Two such known issues are noted below + of which one has been addressed with an APAR. - zos_mvs_raw module execution + fails when invoking Database Image Copy 2 Utility or Database Recovery Utility + in conjunction with FlashCopy or Fast Replication. - zos_mvs_raw module execution + fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". + This issue is addressed by APAR PH28089. minor_changes: - - All modules support relative paths and remove choice case sensitivity. - - zos_data_set added support to allocate and format zFS data sets. - - zos_operator supports new options **wait** and **wait_time_s** such that you - can specify that zos_operator wait the full **wait_time_s** or return as soon - as the first operator command executes. - release_summary: "Release Date: '2021-19-04'\nThis changlelog describes all + - All modules support relative paths and remove choice case sensitivity. + - zos_data_set added support to allocate and format zFS data sets. + - zos_operator supports new options **wait** and **wait_time_s** such that you + can specify that zos_operator wait the full **wait_time_s** or return as soon + as the first operator command executes. 
+ release_summary: + "Release Date: '2021-19-04'\nThis changlelog describes all changes made to the modules and plugins included\nin this collection.\nFor additional details such as required dependencies and availablity review\nthe collections `release notes `__ @@ -1069,153 +1071,157 @@ releases: supported configuration and generate\n inventory and a variables configuration.\n \ - Automate software management with SMP/E Playbooks\n" fragments: - - v1.3.0_summary.yml - - v1.3.0_summary_bugs.yml - - v1.3.0_summary_known.yml - - v1.3.0_summary_minor.yml + - v1.3.0_summary.yml + - v1.3.0_summary_bugs.yml + - v1.3.0_summary_known.yml + - v1.3.0_summary_minor.yml modules: - - description: Add or remove libraries to Authorized Program Facility (APF) - name: zos_apf - namespace: '' - - description: Backup and restore data sets and volumes - name: zos_backup_restore - namespace: '' - - description: Manage block of multi-line textual data on z/OS - name: zos_blockinfile - namespace: '' - - description: Manage data sets - name: zos_data_set - namespace: '' - - description: Find matching data sets - name: zos_find - namespace: '' - release_date: '2022-06-07' + - description: Add or remove libraries to Authorized Program Facility (APF) + name: zos_apf + namespace: "" + - description: Backup and restore data sets and volumes + name: zos_backup_restore + namespace: "" + - description: Manage block of multi-line textual data on z/OS + name: zos_blockinfile + namespace: "" + - description: Manage data sets + name: zos_data_set + namespace: "" + - description: Find matching data sets + name: zos_find + namespace: "" + release_date: "2022-06-07" 1.3.1: changes: bugfixes: - - zos_ping was updated to support Automation Hub documentation generation. - - zos_ssh connection plugin was updated to prioritize the execution of modules - written in REXX over other implementations such is the case for zos_ping. + - zos_ping was updated to support Automation Hub documentation generation. 
+ - zos_ssh connection plugin was updated to prioritize the execution of modules + written in REXX over other implementations such is the case for zos_ping. known_issues: - - When executing programs using zos_mvs_raw, you may encounter errors that originate - in the implementation of the programs. Two such known issues are noted below - of which one has been addressed with an APAR. - zos_mvs_raw module execution - fails when invoking Database Image Copy 2 Utility or Database Recovery Utility - in conjunction with FlashCopy or Fast Replication. - zos_mvs_raw module execution - fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". - This issue is addressed by APAR PH28089. - release_summary: "Release Date: '2022-27-04'\nThis changlelog describes all + - When executing programs using zos_mvs_raw, you may encounter errors that originate + in the implementation of the programs. Two such known issues are noted below + of which one has been addressed with an APAR. - zos_mvs_raw module execution + fails when invoking Database Image Copy 2 Utility or Database Recovery Utility + in conjunction with FlashCopy or Fast Replication. - zos_mvs_raw module execution + fails when invoking DFSRRC00 with parm "UPB,PRECOMP", "UPB, POSTCOMP" or "UPB,PRECOMP,POSTCOMP". + This issue is addressed by APAR PH28089. + release_summary: + "Release Date: '2022-27-04'\nThis changlelog describes all changes made to the modules and plugins included\nin this collection.\nFor additional details such as required dependencies and availablity review\nthe collections `release notes `__ \n" fragments: - - v1.3.1_summary.yml - - v1.3.1_summary_bugs.yml - - v1.3.1_summary_known.yml - release_date: '2022-06-07' + - v1.3.1_summary.yml + - v1.3.1_summary_bugs.yml + - v1.3.1_summary_known.yml + release_date: "2022-06-07" 1.3.3: changes: bugfixes: - - zos_copy was updated to correct deletion of all temporary files and unwarranted - deletes. 
- When the module would complete, a cleanup routine did not take - into account that other processes had open temporary files and thus would - error when trying to remove them. - When the module would copy a directory - (source) from USS to another USS directory (destination), any files currently - in the destination would be deleted. The modules behavior has changed such - that files are no longer deleted unless the force option is set to true. When - **force=true**, copying files or a directory to a USS destination will continue - if it encounters existing files or directories and overwrite any corresponding - files. - - zos_job_query was updated to correct a boolean condition that always evaluated - to "CANCELLED". - When querying jobs that are either **CANCELLED** or have - **FAILED**, they were always treated as **CANCELLED**. - release_summary: "Release Date: '2022-26-04'\nThis changlelog describes all + - zos_copy was updated to correct deletion of all temporary files and unwarranted + deletes. - When the module would complete, a cleanup routine did not take + into account that other processes had open temporary files and thus would + error when trying to remove them. - When the module would copy a directory + (source) from USS to another USS directory (destination), any files currently + in the destination would be deleted. The modules behavior has changed such + that files are no longer deleted unless the force option is set to true. When + **force=true**, copying files or a directory to a USS destination will continue + if it encounters existing files or directories and overwrite any corresponding + files. + - zos_job_query was updated to correct a boolean condition that always evaluated + to "CANCELLED". - When querying jobs that are either **CANCELLED** or have + **FAILED**, they were always treated as **CANCELLED**. 
+ release_summary: + "Release Date: '2022-26-04'\nThis changlelog describes all changes made to the modules and plugins included\nin this collection.\nFor additional details such as required dependencies and availablity review\nthe collections `release notes `__ \n" fragments: - - v1.3.3_summary.yml - - v1.3.3_summary_bugs.yml - release_date: '2022-06-07' + - v1.3.3_summary.yml + - v1.3.3_summary_bugs.yml + release_date: "2022-06-07" 1.3.5: changes: bugfixes: - - "zos_ssh - connection plugin was updated to correct a bug in Ansible that\n - \ would result in playbook task retries overriding the SSH connection\n retries. - This is resolved by renaming the zos_ssh option\n retries to reconnection_retries. - The update addresses users of\n ansible-core v2.9 which continues to use - retries and users of\n ansible-core v2.11 or later which uses reconnection_retries.\n - \ This also resolves a bug in the connection that referenced a deprecated\n - \ constant. (https://github.com/ansible-collections/ibm_zos_core/pull/328)\n" - release_summary: "Release Date: '2022-03-06'\nThis changlelog describes all + - "zos_ssh - connection plugin was updated to correct a bug in Ansible that\n + \ would result in playbook task retries overriding the SSH connection\n retries. + This is resolved by renaming the zos_ssh option\n retries to reconnection_retries. + The update addresses users of\n ansible-core v2.9 which continues to use + retries and users of\n ansible-core v2.11 or later which uses reconnection_retries.\n + \ This also resolves a bug in the connection that referenced a deprecated\n + \ constant. 
(https://github.com/ansible-collections/ibm_zos_core/pull/328)\n" + release_summary: + "Release Date: '2022-03-06'\nThis changlelog describes all changes made to the modules and plugins included\nin this collection.\nFor additional details such as required dependencies and availablity review\nthe collections `release notes `__ \n" fragments: - - 328-rename-retries-to-reconnection_retries.yml - - v1.3.4_summary.yml - release_date: '2022-06-07' + - 328-rename-retries-to-reconnection_retries.yml + - v1.3.4_summary.yml + release_date: "2022-06-07" 1.3.6: changes: bugfixes: - - jobs.py - fixes a utility used by module `zos_job_output` that would truncate - the DD content. (https://github.com/ansible-collections/ibm_zos_core/pull/462) - - zos_copy - fixes a bug that when a directory is copied from the controller - to the managed node and a mode is set, the mode is now applied to the directory - on the controller. If the directory being copied contains files and mode is - set, mode will only be applied to the files being copied not the pre-existing - files.(https://github.com/ansible-collections/ibm_zos_core/pull/462) - - zos_copy - fixes a bug where options were not defined in the module argument - spec that will result in error when running `ansible-core` 2.11 and using - options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) - - zos_fetch - fixes a bug where an option was not defined in the module argument - spec that will result in error when running `ansible-core` 2.11 and using - option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) - - zos_job_submit - fixes a bug where an option was not defined in the module - argument spec that will result in error when running `ansible-core` 2.11 and - using option `encoding`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/462) - - zos_ssh - fixes connection plugin which will error when using `ansible-core` - 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. - (https://github.com/ansible-collections/ibm_zos_core/pull/462) - - zos_ssh - fixes connection plugin which will error when using `ansible-core` - 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. - (https://github.com/ansible-collections/ibm_zos_core/pull/513) + - jobs.py - fixes a utility used by module `zos_job_output` that would truncate + the DD content. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_copy - fixes a bug that when a directory is copied from the controller + to the managed node and a mode is set, the mode is now applied to the directory + on the controller. If the directory being copied contains files and mode is + set, mode will only be applied to the files being copied not the pre-existing + files.(https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_copy - fixes a bug where options were not defined in the module argument + spec that will result in error when running `ansible-core` 2.11 and using + options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_fetch - fixes a bug where an option was not defined in the module argument + spec that will result in error when running `ansible-core` 2.11 and using + option `encoding`. (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_job_submit - fixes a bug where an option was not defined in the module + argument spec that will result in error when running `ansible-core` 2.11 and + using option `encoding`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_ssh - fixes connection plugin which will error when using `ansible-core` + 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. + (https://github.com/ansible-collections/ibm_zos_core/pull/462) + - zos_ssh - fixes connection plugin which will error when using `ansible-core` + 2.11 with an `AttributeError module 'ansible.constants' has no attribute 'ANSIBLE_SSH_CONTROL_PATH_DIR'`. + (https://github.com/ansible-collections/ibm_zos_core/pull/513) minor_changes: - - zos_copy - was enhanced for when `src` is a directory and ends with "/", the - contents of it will be copied into the root of `dest`. If it doesn't end with - "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/515) - release_summary: "Release Date: '2022-10-07'\nThis changelog describes all changes + - zos_copy - was enhanced for when `src` is a directory and ends with "/", the + contents of it will be copied into the root of `dest`. If it doesn't end with + "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/515) + release_summary: + "Release Date: '2022-10-07'\nThis changelog describes all changes made to the modules and plugins included\nin this collection. The release date is the date the changelog is created.\nFor additional details such as required dependencies and availability review\nthe collections `release notes `__ \n" fragments: - - 462-copy-fetch-submit-utils.yml - - 513-zos_ssh-support-ansible-2.11.yml - - 515-copy-support-directories.yml - - v1.3.6_summary.yml - release_date: '2022-10-07' + - 462-copy-fetch-submit-utils.yml + - 513-zos_ssh-support-ansible-2.11.yml + - 515-copy-support-directories.yml + - v1.3.6_summary.yml + release_date: "2022-10-07" 1.4.0: changes: minor_changes: - - zos_copy - enhanced to optimize how it captures the permission bits state - for the `dest`. 
This change now reviews the source files instead of traversing - the entire `dest` path. (https://github.com/ansible-collections/ibm_zos_core/pull/561) - - zos_copy - enhanced to support creating a parent directory when it does not - exist in the `dest` path. Prior to this change, if a parent directory anywhere - in the path did not exist the task would fail as it was stated in documentation. - (https://github.com/ansible-collections/ibm_zos_core/pull/561) - - "zos_copy - enhanced to support system symbols in PARMLIB. System symbols - are elements that allow different z/OS\xAE systems to share PARMLIB definitions - while retaining unique values in those definitions. This was fixed in a future - release through the use of one of the ZOAU dependency but this version of - `ibm_zos_core` does not support that dependency version so this support was - added. (https://github.com/ansible-collections/ibm_zos_core/pull/566)" - release_summary: 'Release Date: ''2022-12-07'' + - zos_copy - enhanced to optimize how it captures the permission bits state + for the `dest`. This change now reviews the source files instead of traversing + the entire `dest` path. (https://github.com/ansible-collections/ibm_zos_core/pull/561) + - zos_copy - enhanced to support creating a parent directory when it does not + exist in the `dest` path. Prior to this change, if a parent directory anywhere + in the path did not exist the task would fail as it was stated in documentation. + (https://github.com/ansible-collections/ibm_zos_core/pull/561) + - "zos_copy - enhanced to support system symbols in PARMLIB. System symbols + are elements that allow different z/OS\xAE systems to share PARMLIB definitions + while retaining unique values in those definitions. This was fixed in a future + release through the use of one of the ZOAU dependency but this version of + `ibm_zos_core` does not support that dependency version so this support was + added. 
(https://github.com/ansible-collections/ibm_zos_core/pull/566)" + release_summary: "Release Date: '2022-12-07' This changelog describes all changes made to the modules and plugins included @@ -1225,121 +1231,122 @@ releases: the collections `release notes `__ - ' + " fragments: - - 561-update-directory-create.yml - - 566-update-with-symbol-support.yml - - v1.4.0_summary.yml - release_date: '2022-12-07' + - 561-update-directory-create.yml + - 566-update-with-symbol-support.yml + - v1.4.0_summary.yml + release_date: "2022-12-07" 1.4.0-beta.1: changes: bugfixes: - - zos_job_output was updated to correct possible truncated responses for the - ddname content. This would occur for jobs with very large amounts of content - from a ddname. - - "zos_ssh - connection plugin was updated to correct a bug in Ansible that\n - \ would result in playbook task retries overriding the SSH connection\n retries. - This is resolved by renaming the zos_ssh option\n retries to reconnection_retries. - The update addresses users of\n ansible-core v2.9 which continues to use - retries and users of\n ansible-core v2.11 or later which uses reconnection_retries.\n - \ This also resolves a bug in the connection that referenced a deprecated\n - \ constant. (https://github.com/ansible-collections/ibm_zos_core/pull/328)\n" + - zos_job_output was updated to correct possible truncated responses for the + ddname content. This would occur for jobs with very large amounts of content + from a ddname. + - "zos_ssh - connection plugin was updated to correct a bug in Ansible that\n + \ would result in playbook task retries overriding the SSH connection\n retries. + This is resolved by renaming the zos_ssh option\n retries to reconnection_retries. + The update addresses users of\n ansible-core v2.9 which continues to use + retries and users of\n ansible-core v2.11 or later which uses reconnection_retries.\n + \ This also resolves a bug in the connection that referenced a deprecated\n + \ constant. 
(https://github.com/ansible-collections/ibm_zos_core/pull/328)\n" deprecated_features: - - zos_copy and zos_fetch option sftp_port has been deprecated. To set the SFTP - port, use the supported options in the ansible.builtin.ssh plugin. Refer to - the `SSH port `__ - option to configure the port used during the modules SFTP transport. - - zos_copy module option model_ds has been removed. The model_ds logic is now - automatically managed and data sets are either created based on the src data - set or overridden by the new option destination_dataset. - - zos_ssh connection plugin has been removed, it is no longer required. You - must remove all playbook references to connection ibm.ibm_zos_core.zos_ssh. + - zos_copy and zos_fetch option sftp_port has been deprecated. To set the SFTP + port, use the supported options in the ansible.builtin.ssh plugin. Refer to + the `SSH port `__ + option to configure the port used during the modules SFTP transport. + - zos_copy module option model_ds has been removed. The model_ds logic is now + automatically managed and data sets are either created based on the src data + set or overridden by the new option destination_dataset. + - zos_ssh connection plugin has been removed, it is no longer required. You + must remove all playbook references to connection ibm.ibm_zos_core.zos_ssh. major_changes: - - zos_copy was updated to support the ansible.builtin.ssh connection options; - for further reference refer to the SSH plugin documentation. - - zos_copy was updated to take into account the record length when the source - is a USS file and the destination is a data set with a record length. This - is done by inspecting the destination data set attributes and using these - attributes to create a new data set. - - zos_copy was updated with the capabilities to define destination data sets - from within the zos_copy module. 
In the case where you are copying to a data - set destination that does not exist, you can now do so using the new zos_copy - module option destination. - - zos_fetch was updated to support the ansible.builtin.ssh connection options; - for further reference refer to the SSH plugin documentation. - - zos_job_output was updated to to include the completion code (CC) for each - individual job step as part of the ret_code response. - - zos_job_query was updated to handle when an invalid job ID or job name is - used with the module and returns a proper response. - - zos_job_query was updated to support a 7 digit job number ID for when there - are greater than 99,999 jobs in the history. - - zos_job_submit was enhanced to check for 'JCL ERROR' when jobs are submitted - and result in a proper module response. - - zos_job_submit was updated to fail fast when a submitted job fails instead - of waiting a predetermined time. - - zos_operator_action_query response messages were improved with more diagnostic - information in the event an error is encountered. - - zos_ping was updated to remove the need for the zos_ssh connection plugin - dependency. - release_summary: "Release Date: '2021-06-23'\nThis changlelog describes all + - zos_copy was updated to support the ansible.builtin.ssh connection options; + for further reference refer to the SSH plugin documentation. + - zos_copy was updated to take into account the record length when the source + is a USS file and the destination is a data set with a record length. This + is done by inspecting the destination data set attributes and using these + attributes to create a new data set. + - zos_copy was updated with the capabilities to define destination data sets + from within the zos_copy module. In the case where you are copying to a data + set destination that does not exist, you can now do so using the new zos_copy + module option destination. 
+ - zos_fetch was updated to support the ansible.builtin.ssh connection options; + for further reference refer to the SSH plugin documentation. + - zos_job_output was updated to to include the completion code (CC) for each + individual job step as part of the ret_code response. + - zos_job_query was updated to handle when an invalid job ID or job name is + used with the module and returns a proper response. + - zos_job_query was updated to support a 7 digit job number ID for when there + are greater than 99,999 jobs in the history. + - zos_job_submit was enhanced to check for 'JCL ERROR' when jobs are submitted + and result in a proper module response. + - zos_job_submit was updated to fail fast when a submitted job fails instead + of waiting a predetermined time. + - zos_operator_action_query response messages were improved with more diagnostic + information in the event an error is encountered. + - zos_ping was updated to remove the need for the zos_ssh connection plugin + dependency. + release_summary: + "Release Date: '2021-06-23'\nThis changlelog describes all changes made to the modules and plugins included\nin this collection.\nFor additional details such as required dependencies and availablity review\nthe collections `release notes `__ \n" fragments: - - v1.4.0-beta.1_summary.yml - - v1.4.0-beta.1_summary_bugs.yml - - v1.4.0-beta.1_summary_deprecated.yml - - v1.4.0-beta.1_summary_minor.yml - - v1.4.0-beta.1_summary_trivial.yml + - v1.4.0-beta.1_summary.yml + - v1.4.0-beta.1_summary_bugs.yml + - v1.4.0-beta.1_summary_deprecated.yml + - v1.4.0-beta.1_summary_minor.yml + - v1.4.0-beta.1_summary_trivial.yml modules: - - description: Mount a z/OS file system. - name: zos_mount - namespace: '' - release_date: '2022-06-10' + - description: Mount a z/OS file system. + name: zos_mount + namespace: "" + release_date: "2022-06-10" 1.4.0-beta.2: changes: bugfixes: - - zos_copy - fixes a bug that did not create a data set on the specified volume. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_copy - fixes a bug where a number of attributes were not an option when - using `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_job_output - fixes a bug that returned all ddname's when a specific ddname - was provided. Now a specific ddname can be returned and all others ignored. - (https://github.com/ansible-collections/ibm_zos_core/pull/507) - - zos_mount - fixed option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/502) - - zos_operator - enhanced to allow for MVS operator `SET` command, `SET` is - equivalent to the abbreviated `T` command. (https://github.com/ansible-collections/ibm_zos_core/pull/501) + - zos_copy - fixes a bug that did not create a data set on the specified volume. + (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_copy - fixes a bug where a number of attributes were not an option when + using `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_job_output - fixes a bug that returned all ddname's when a specific ddname + was provided. Now a specific ddname can be returned and all others ignored. + (https://github.com/ansible-collections/ibm_zos_core/pull/507) + - zos_mount - fixed option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/502) + - zos_operator - enhanced to allow for MVS operator `SET` command, `SET` is + equivalent to the abbreviated `T` command. (https://github.com/ansible-collections/ibm_zos_core/pull/501) minor_changes: - - zos_copy - enhanced the force option when `force=true` and the remote file - or data set `dest` is NOT empty, the `dest` will be deleted and recreated - with the `src` data set attributes, otherwise it will be recreated with the - `dest` data set attributes. 
(https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_copy - fixes a bug that when a directory is copied from the controller - to the managed node and a mode is set, the mode is applied to the directory - on the managed node. If the directory being copied contains files and mode - is set, mode will only be applied to the files being copied not the pre-existing - files. (https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_copy - fixes a bug where options were not defined in the module argument - spec that will result in error when running `ansible-core` v2.11 and using - options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/496) - - zos_copy - introduced an updated creation policy referred to as precedence - rules such that if `dest_data_set` is set, this will take precedence. If `dest` - is an empty data set, the empty data set will be written with the expectation - its attributes satisfy the copy. If no precedent rule has been exercised, - `dest` will be created with the same attributes of `src`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_copy - introduced new computation capabilities such that if `dest` is - a nonexistent data set, the attributes assigned will depend on the type of - `src`. If `src` is a USS file, `dest` will have a Fixed Block (FB) record - format and the remaining attributes will be computed. If `src` is binary, - `dest` will have a Fixed Block (FB) record format with a record length of - 80, block size of 32760, and the remaining attributes will be computed. (https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_copy - option `dest_dataset` has been deprecated and removed in favor - of the new option `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) - - zos_copy - was enhanced for when `src` is a directory and ends with "/", the - contents of it will be copied into the root of `dest`. 
It it doesn't end with - "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/496) - release_summary: 'Release Date: ''2022-10-17'' + - zos_copy - enhanced the force option when `force=true` and the remote file + or data set `dest` is NOT empty, the `dest` will be deleted and recreated + with the `src` data set attributes, otherwise it will be recreated with the + `dest` data set attributes. (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_copy - fixes a bug that when a directory is copied from the controller + to the managed node and a mode is set, the mode is applied to the directory + on the managed node. If the directory being copied contains files and mode + is set, mode will only be applied to the files being copied not the pre-existing + files. (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_copy - fixes a bug where options were not defined in the module argument + spec that will result in error when running `ansible-core` v2.11 and using + options `force` or `mode`. (https://github.com/ansible-collections/ibm_zos_core/pull/496) + - zos_copy - introduced an updated creation policy referred to as precedence + rules such that if `dest_data_set` is set, this will take precedence. If `dest` + is an empty data set, the empty data set will be written with the expectation + its attributes satisfy the copy. If no precedent rule has been exercised, + `dest` will be created with the same attributes of `src`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_copy - introduced new computation capabilities such that if `dest` is + a nonexistent data set, the attributes assigned will depend on the type of + `src`. If `src` is a USS file, `dest` will have a Fixed Block (FB) record + format and the remaining attributes will be computed. 
If `src` is binary, + `dest` will have a Fixed Block (FB) record format with a record length of + 80, block size of 32760, and the remaining attributes will be computed. (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_copy - option `dest_dataset` has been deprecated and removed in favor + of the new option `dest_data_set`. (https://github.com/ansible-collections/ibm_zos_core/pull/306) + - zos_copy - was enhanced for when `src` is a directory and ends with "/", the + contents of it will be copied into the root of `dest`. It it doesn't end with + "/", the directory itself will be copied. (https://github.com/ansible-collections/ibm_zos_core/pull/496) + release_summary: "Release Date: '2022-10-17' This changelog describes all changes made to the modules and plugins included @@ -1349,36 +1356,36 @@ releases: the collections `release notes `__ - ' + " fragments: - - 306-updates-zos-copy-architecture.yml - - 496-copy-support-directories.yml - - 501-allow-operator-set-command.yml - - 502-update-ccsid-type-int.yml - - 507-display-specific-ddname.yml - - v1.4.0-beta.2_summary.yml - release_date: '2022-10-13' + - 306-updates-zos-copy-architecture.yml + - 496-copy-support-directories.yml + - 501-allow-operator-set-command.yml + - 502-update-ccsid-type-int.yml + - 507-display-specific-ddname.yml + - v1.4.0-beta.2_summary.yml + release_date: "2022-10-13" 1.4.1: changes: bugfixes: - - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix - now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/640) - - zos_copy - Fixed a bug where the module would change the mode for a directory - when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/742) - - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an - error while computing the record length for a new destination dataset. Issue - 664. 
(https://github.com/ansible-collections/ibm_zos_core/pull/732) - - zos_copy - Fixes a bug where the code for fixing an issue with newlines in - files (issue 599) would use the wrong encoding for normalization. Issue 678. - (https://github.com/ansible-collections/ibm_zos_core/pull/732) - - zos_copy - fixed wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system to - its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - - zos_copy - fixes a bug where the computed record length for a new destination - dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) - - zos_job_query - fixes a bug where a boolean was not being properly compared. - (https://github.com/ansible-collections/ibm_zos_core/pull/379) - release_summary: 'Release Date: ''2023-04-18'' + - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/640) + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/742) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/732) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/732) + - zos_copy - fixed wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) + - zos_copy - fixes a bug where the computed record length for a new destination + dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) + - zos_job_query - fixes a bug where a boolean was not being properly compared. + (https://github.com/ansible-collections/ibm_zos_core/pull/379) + release_summary: "Release Date: '2023-04-18' This changelog describes all changes made to the modules and plugins included @@ -1388,53 +1395,53 @@ releases: the collections `release notes `__ - ' + " fragments: - - 579-zos-query-boolean-correction.yml - - 588-update-emergency-backup.yml - - 599-copy-carriage-return.yml - - 601-copy-loadlib-member.yml - - 728-zos_operator-example-updates.yml - - 732-zos_copy-encoding-bugs.yml - - 742_zos_copy-mode-is-applied-to-the-destination-directory-a-deviation-from-the-communtiy-module-behavior.yaml - - v1.4.1_summary.yml - release_date: '2023-04-18' + - 579-zos-query-boolean-correction.yml + - 588-update-emergency-backup.yml + - 599-copy-carriage-return.yml + - 601-copy-loadlib-member.yml + - 728-zos_operator-example-updates.yml + - 732-zos_copy-encoding-bugs.yml + - 742_zos_copy-mode-is-applied-to-the-destination-directory-a-deviation-from-the-communtiy-module-behavior.yaml + - v1.4.1_summary.yml + release_date: "2023-04-18" 1.5.0: changes: bugfixes: - - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix - now looks for error in stdout in the if statement to use -X option. 
(https://github.com/ansible-collections/ibm_zos_core/pull/641) - - zos_copy - Fixed a bug where the module would change the mode for a directory - when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/746) - - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an - error while computing the record length for a new destination dataset. Issue - 664. (https://github.com/ansible-collections/ibm_zos_core/pull/725) - - zos_copy - Fixes a bug where the code for fixing an issue with newlines in - files (issue 599) would use the wrong encoding for normalization. Issue 678. - (https://github.com/ansible-collections/ibm_zos_core/pull/725) - - zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would - result in a `type` error that a stack trace would result in the response, - issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue when a job encounters a security exception - no job log would would result in the response, issue 684. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue when a job is configured for a syntax check - using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` - to return a response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue when a job is configured for a syntax check - using TYPRUN=SCAN that no job log would result in the response, issue 685. - (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue when a job is purged by the system that a - stack trace would result in the response, issue 681. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that - a stack trace would result in the response, issue 623. 
(https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue when resources (data sets) identified in - JCL did not exist such that a stack trace would result in the response, issue - 624. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_job_submit - Fixes the issue where the response did not include the job - log when a non-zero return code would occur, issue 655. (https://github.com/ansible-collections/ibm_zos_core/pull/683) - - zos_operator - fixed incorrect example descriptions and updated the doc to - highlight the deprecated option `wait`. (https://github.com/ansible-collections/ibm_zos_core/pull/648) - release_summary: 'Release Date: ''2023-04-21'' + - zos_copy - Copy failed from a loadlib member to another loadlib member. Fix + now looks for error in stdout in the if statement to use -X option. (https://github.com/ansible-collections/ibm_zos_core/pull/641) + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/746) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/725) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/725) + - zos_job_submit - Fixes the issue when `wait_time_s` was set to 0 that would + result in a `type` error that a stack trace would result in the response, + issue 670. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job encounters a security exception + no job log would result in the response, issue 684. 
(https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that it would wait the full duration set by `wait_time_s` + to return a response, issue 685. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is configured for a syntax check + using TYPRUN=SCAN that no job log would result in the response, issue 685. + (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when a job is purged by the system that a + stack trace would result in the response, issue 681. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when invalid JCL syntax is submitted that + a stack trace would result in the response, issue 623. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue when resources (data sets) identified in + JCL did not exist such that a stack trace would result in the response, issue + 624. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_job_submit - Fixes the issue where the response did not include the job + log when a non-zero return code would occur, issue 655. (https://github.com/ansible-collections/ibm_zos_core/pull/683) + - zos_operator - fixed incorrect example descriptions and updated the doc to + highlight the deprecated option `wait`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/648) + release_summary: "Release Date: '2023-04-21' This changelog describes all changes made to the modules and plugins included @@ -1442,181 +1449,181 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 641-copy-loadlib-member.yml - - 648-zos_operator-examples.yml - - 663-zos_gather_facts-update-docstring.yml - - 683-zos_job_submit-bugs.yml - - 725-zos_copy-encoding-bugs.yml - - 729-zos_operator-example-added.yml - - 739-zos_copy-volume-symbol-test.yml - - 746--Mode-set-for-files-is-applied-to-destination-directory.yml - - v1.5.0_summary.yml - release_date: '2023-04-21' + - 641-copy-loadlib-member.yml + - 648-zos_operator-examples.yml + - 663-zos_gather_facts-update-docstring.yml + - 683-zos_job_submit-bugs.yml + - 725-zos_copy-encoding-bugs.yml + - 729-zos_operator-example-added.yml + - 739-zos_copy-volume-symbol-test.yml + - 746--Mode-set-for-files-is-applied-to-destination-directory.yml + - v1.5.0_summary.yml + release_date: "2023-04-21" 1.5.0-beta.1: changes: bugfixes: - - zos_copy - Fixes a bug such that the module fails when copying files from - a directory needing also to be encoded. The failure would also delete the - `src` which was not desirable behavior. Fixes deletion of src on encoding - error. (https://github.com/ansible-collections/ibm_zos_core/pull/321). - - zos_copy - Fixes a bug where copying a member from a loadlib to another loadlib - fails. (https://github.com/ansible-collections/ibm_zos_core/pull/640) - - zos_copy - Fixes a bug where if a destination has accented characters in its - content, the module would fail when trying to determine if it is empty. (https://github.com/ansible-collections/ibm_zos_core/pull/634) - - zos_copy - Fixes a bug where the computed record length for a new destination - dataset would include newline characters. 
(https://github.com/ansible-collections/ibm_zos_core/pull/620) - - zos_copy - Fixes wrongful creation of destination backups when module option - `force` is true, creating emergency backups meant to restore the system to - its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) - - zos_copy - module was updated to correct a bug in the case when the destination - (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module - would fail in determining if the PDSE actually existed and try to create it - when it already existed resulting in an error that would prevent the module - from correctly executing. (https://github.com/ansible-collections/ibm_zos_core/pull/327) - - zos_data_set - Fixes a bug such that the module will delete a catalogued data - set over an uncatalogued data set even though the volume is provided for the - uncataloged data set. This is unexpected behavior and does not align to documentation; - correct behavior is that when a volume is provided that is the first place - the module should look for the data set, whether or not it is cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/325). - - zos_data_set - Fixes a bug where the default record format FB was actually - never enforced and when enforced it would cause VSAM creation to fail with - a Dynalloc failure. Also cleans up some of the options that are set by default - when they have no bearing for batch. (https://github.com/ansible-collections/ibm_zos_core/pull/647) - - zos_fetch - Updates the modules behavior when fetching VSAM data sets such - that the maximum record length is now determined when creating a temporary - data set to copy the VSAM data into and a variable-length (VB) data set is - used. (https://github.com/ansible-collections/ibm_zos_core/pull/350) - - zos_job_output - Fixes a bug that returned all ddname's when a specific ddnamae - was provided. 
Now a specific ddname can be returned and all others ignored. - (https://github.com/ansible-collections/ibm_zos_core/pull/334) - - zos_job_query - was updated to correct a boolean condition that always evaluated - to "CANCELLED". (https://github.com/ansible-collections/ibm_zos_core/pull/312). - - zos_mount - Fixes option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/511) - - zos_mvs_raw - module was updated to correct a bug when no DD statements were - provided. The module when no option was provided for `dds` would error, a - default was provided to correct this behavior. (https://github.com/ansible-collections/ibm_zos_core/pull/336) - - zos_operator - Fixes case sensitive error checks, invalid, error & unidentifiable - (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - Fixes such that specifying wait_time_s would throw an error - (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - Fixes the wait_time_s to default to 1 second (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - was updated to correct missing verbosity content when the option - verbose was set to True. zos_operator - was updated to correct the trailing - lines that would appear in the result content. (https://github.com/ansible-collections/ibm_zos_core/pull/400). + - zos_copy - Fixes a bug such that the module fails when copying files from + a directory needing also to be encoded. The failure would also delete the + `src` which was not desirable behavior. Fixes deletion of src on encoding + error. (https://github.com/ansible-collections/ibm_zos_core/pull/321). + - zos_copy - Fixes a bug where copying a member from a loadlib to another loadlib + fails. 
(https://github.com/ansible-collections/ibm_zos_core/pull/640) + - zos_copy - Fixes a bug where if a destination has accented characters in its + content, the module would fail when trying to determine if it is empty. (https://github.com/ansible-collections/ibm_zos_core/pull/634) + - zos_copy - Fixes a bug where the computed record length for a new destination + dataset would include newline characters. (https://github.com/ansible-collections/ibm_zos_core/pull/620) + - zos_copy - Fixes wrongful creation of destination backups when module option + `force` is true, creating emergency backups meant to restore the system to + its initial state in case of a module failure only when force is false. (https://github.com/ansible-collections/ibm_zos_core/pull/590) + - zos_copy - module was updated to correct a bug in the case when the destination + (dest) is a PDSE and the source (src) is a Unix Systems File (USS). The module + would fail in determining if the PDSE actually existed and try to create it + when it already existed resulting in an error that would prevent the module + from correctly executing. (https://github.com/ansible-collections/ibm_zos_core/pull/327) + - zos_data_set - Fixes a bug such that the module will delete a catalogued data + set over an uncatalogued data set even though the volume is provided for the + uncataloged data set. This is unexpected behavior and does not align to documentation; + correct behavior is that when a volume is provided that is the first place + the module should look for the data set, whether or not it is cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/325). + - zos_data_set - Fixes a bug where the default record format FB was actually + never enforced and when enforced it would cause VSAM creation to fail with + a Dynalloc failure. Also cleans up some of the options that are set by default + when they have no bearing for batch. 
(https://github.com/ansible-collections/ibm_zos_core/pull/647) + - zos_fetch - Updates the modules behavior when fetching VSAM data sets such + that the maximum record length is now determined when creating a temporary + data set to copy the VSAM data into and a variable-length (VB) data set is + used. (https://github.com/ansible-collections/ibm_zos_core/pull/350) + - zos_job_output - Fixes a bug that returned all ddname's when a specific ddname + was provided. Now a specific ddname can be returned and all others ignored. + (https://github.com/ansible-collections/ibm_zos_core/pull/334) + - zos_job_query - was updated to correct a boolean condition that always evaluated + to "CANCELLED". (https://github.com/ansible-collections/ibm_zos_core/pull/312). + - zos_mount - Fixes option `tag_ccsid` to correctly allow for type int. (https://github.com/ansible-collections/ibm_zos_core/pull/511) + - zos_mvs_raw - module was updated to correct a bug when no DD statements were + provided. The module when no option was provided for `dds` would error, a + default was provided to correct this behavior. (https://github.com/ansible-collections/ibm_zos_core/pull/336) + - zos_operator - Fixes case sensitive error checks, invalid, error & unidentifiable + (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - Fixes such that specifying wait_time_s would throw an error + (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - Fixes the wait_time_s to default to 1 second (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - was updated to correct missing verbosity content when the option + verbose was set to True. zos_operator - was updated to correct the trailing + lines that would appear in the result content. (https://github.com/ansible-collections/ibm_zos_core/pull/400). 
deprecated_features: - - zos_encode - deprecates the module options `from_encoding` and `to_encoding` - to use suboptions `from` and `to` in order to remain consistent with all other - modules. (https://github.com/ansible-collections/ibm_zos_core/pull/345). - - zos_job_submit - Response 'message' property has been deprecated, all responses - are now in response property 'msg'. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_job_submit - The 'wait' option has been deprecated because using option - 'wait_time_s' implies the job is going to wait. (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_encode - deprecates the module options `from_encoding` and `to_encoding` + to use suboptions `from` and `to` in order to remain consistent with all other + modules. (https://github.com/ansible-collections/ibm_zos_core/pull/345). + - zos_job_submit - Response 'message' property has been deprecated, all responses + are now in response property 'msg'. (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_job_submit - The 'wait' option has been deprecated because using option + 'wait_time_s' implies the job is going to wait. (https://github.com/ansible-collections/ibm_zos_core/issues/389). major_changes: - - ibm_zos_core - Updates the entire collection in that the collection no longer - depends on the managed node having installed System Display and Search Facility - (SDSF). Remove SDSF dependency from ibm_zos_core collection. (https://github.com/ansible-collections/ibm_zos_core/pull/303). + - ibm_zos_core - Updates the entire collection in that the collection no longer + depends on the managed node having installed System Display and Search Facility + (SDSF). Remove SDSF dependency from ibm_zos_core collection. (https://github.com/ansible-collections/ibm_zos_core/pull/303). minor_changes: - - module utility jobs - was updated to remove the usage of REXX and replaced - with ZOAU python APIs. 
This reduces code replication and it removes the need - for REXX interpretation which increases performance. (https://github.com/ansible-collections/ibm_zos_core/pull/312). - - module utils backup - updates the module with a new option named tmp_hlq. - This allows for a user to specify the data set high level qualifier (HLQ) - used in any temporary data set created by the module. Often, the defaults - are not permitted on systems, this provides a way to override the defaults. - (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - module utils dd_statement- updates the module with a new option named tmp_hlq. - This allows for a user to specify the data set high level qualifier (HLQ) - used in any temporary data set created by the module. Often, the defaults - are not permitted on systems, this provides a way to override the defaults. - (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - module utils encode - updates the module with a new option named tmp_hlq. - This allows for a user to specify the data set high level qualifier (HLQ) - used in any temporary data set created by the module. Often, the defaults - are not permitted on systems, this provides a way to override the defaults. - (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_apf - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_blockinfile - fixes a bug when using double quotes in the block text of - the module. When double quotes appeared in block text, the module would error - differently depending on the usage of option insertafter. 
Examples of this - error have return code 1 or 16 along with message "ZOAU dmod return content - is NOT in json format" and a varying stderr. (https://github.com/ansible-collections/ibm_zos_core/pull/303). - - zos_blockinfile - updates the module with a new option named force. This allows - for a user to specify that the data set can be shared with others during an - update which results in the data set you are updating to be simultaneously - updated by others. (https://github.com/ansible-collections/ibm_zos_core/pull/316). - - zos_blockinfile - updates the module with a new option named indentation. - This allows for a user to specify a number of spaces to prepend to the content - before being inserted into the destination. (https://github.com/ansible-collections/ibm_zos_core/pull/317). - - zos_blockinfile - updates the module with a new option named tmp_hlq. This - allows for a user to specify the data set high level qualifier (HLQ) used - in any temporary data set created by the module. Often, the defaults are not - permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_copy - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_data_set - Ensures that temporary datasets created by zos_data_set use - the tmp_hlq specified. This allows for a user to specify the data set high - level qualifier (HLQ) used in any temporary data set created by the module. - Often, the defaults are not permitted on systems, this provides a way to override - the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/491). - - zos_encode - updates the module with a new option named tmp_hlq. 
This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_fetch - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_gather_facts - is a new module that can discover facts about the managed - z/OS target. This module leverages the zinfo utility offered by ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/322). - - zos_job_output - was updated to leverage the latest changes that removes the - REXX code by calling the module utility jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/312). - - zos_job_query - was updated to leverage the latest changes that removes the - REXX code by calling the module utility jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/312). - - zos_job_query - was updated to use the jobs module utility. (https://github.com/ansible-collections/ibm_zos_core/pull/312). - - zos_job_submit - The architecture changed such that the entire modules execution - time now captured in the duration time which includes job submission and log - collection. If a job does not return by the default 10 sec 'wait_time_s' value, - it can be increased up to 86400 seconds. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_job_submit - behavior changed when a volume is defined in the module options - such that it will catalog the data set if it is not cataloged and submit the - job. 
In the past, the function did not catalog the data set and instead performed - I/O operations and then submitted the job. This behavior aligns to other module - behaviors and reduces the possibility to encounter a permissions issue. (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_job_submit - was updated to include an additional error code condition - JCLERR. (https://github.com/ansible-collections/ibm_zos_core/pull/312) - - zos_lineinfile - updates the module with a new option named tmp_hlq. This - allows for a user to specify the data set high level qualifier (HLQ) used - in any temporary data set created by the module. Often, the defaults are not - permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_mount - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). - - zos_mvs_raw - Ensures that temporary datasets created by DD Statements use - the tmp_hlq specified. This allows for a user to specify the data set high - level qualifier (HLQ) used in any temporary data set created by the module. - Often, the defaults are not permitted on systems, this provides a way to override - the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/414). - - zos_mvs_raw - updates the module with a new option named tmp_hlq. This allows - for a user to specify the data set high level qualifier (HLQ) used in any - temporary data set created by the module. Often, the defaults are not permitted - on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). 
- - zos_operator - added in the response the cmd result (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - added in the response the elapsed time (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - added in the response the wait_time_s set (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - deprecated the wait option, not needed with wait_time_s minor_changes - (https://github.com/ansible-collections/ibm_zos_core/issues/389). - - zos_operator - was updated to remove the usage of REXX and replaced with ZOAU - python APIs. This reduces code replication and it removes the need for REXX - interpretation which increases performance. (https://github.com/ansible-collections/ibm_zos_core/pull/312). - release_summary: 'Release Date: ''2022-11-17'' + - module utility jobs - was updated to remove the usage of REXX and replaced + with ZOAU python APIs. This reduces code replication and it removes the need + for REXX interpretation which increases performance. (https://github.com/ansible-collections/ibm_zos_core/pull/312). + - module utils backup - updates the module with a new option named tmp_hlq. + This allows for a user to specify the data set high level qualifier (HLQ) + used in any temporary data set created by the module. Often, the defaults + are not permitted on systems, this provides a way to override the defaults. + (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - module utils dd_statement- updates the module with a new option named tmp_hlq. + This allows for a user to specify the data set high level qualifier (HLQ) + used in any temporary data set created by the module. Often, the defaults + are not permitted on systems, this provides a way to override the defaults. + (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - module utils encode - updates the module with a new option named tmp_hlq. 
+ This allows for a user to specify the data set high level qualifier (HLQ) + used in any temporary data set created by the module. Often, the defaults + are not permitted on systems, this provides a way to override the defaults. + (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_apf - updates the module with a new option named tmp_hlq. This allows + for a user to specify the data set high level qualifier (HLQ) used in any + temporary data set created by the module. Often, the defaults are not permitted + on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_blockinfile - fixes a bug when using double quotes in the block text of + the module. When double quotes appeared in block text, the module would error + differently depending on the usage of option insertafter. Examples of this + error have return code 1 or 16 along with message "ZOAU dmod return content + is NOT in json format" and a varying stderr. (https://github.com/ansible-collections/ibm_zos_core/pull/303). + - zos_blockinfile - updates the module with a new option named force. This allows + for a user to specify that the data set can be shared with others during an + update which results in the data set you are updating to be simultaneously + updated by others. (https://github.com/ansible-collections/ibm_zos_core/pull/316). + - zos_blockinfile - updates the module with a new option named indentation. + This allows for a user to specify a number of spaces to prepend to the content + before being inserted into the destination. (https://github.com/ansible-collections/ibm_zos_core/pull/317). + - zos_blockinfile - updates the module with a new option named tmp_hlq. This + allows for a user to specify the data set high level qualifier (HLQ) used + in any temporary data set created by the module. Often, the defaults are not + permitted on systems, this provides a way to override the defaults. 
(https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_copy - updates the module with a new option named tmp_hlq. This allows + for a user to specify the data set high level qualifier (HLQ) used in any + temporary data set created by the module. Often, the defaults are not permitted + on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_data_set - Ensures that temporary datasets created by zos_data_set use + the tmp_hlq specified. This allows for a user to specify the data set high + level qualifier (HLQ) used in any temporary data set created by the module. + Often, the defaults are not permitted on systems, this provides a way to override + the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/491). + - zos_encode - updates the module with a new option named tmp_hlq. This allows + for a user to specify the data set high level qualifier (HLQ) used in any + temporary data set created by the module. Often, the defaults are not permitted + on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_fetch - updates the module with a new option named tmp_hlq. This allows + for a user to specify the data set high level qualifier (HLQ) used in any + temporary data set created by the module. Often, the defaults are not permitted + on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_gather_facts - is a new module that can discover facts about the managed + z/OS target. This module leverages the zinfo utility offered by ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/322). + - zos_job_output - was updated to leverage the latest changes that removes the + REXX code by calling the module utility jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/312). 
+ - zos_job_query - was updated to leverage the latest changes that removes the + REXX code by calling the module utility jobs. (https://github.com/ansible-collections/ibm_zos_core/pull/312). + - zos_job_query - was updated to use the jobs module utility. (https://github.com/ansible-collections/ibm_zos_core/pull/312). + - zos_job_submit - The architecture changed such that the entire modules execution + time now captured in the duration time which includes job submission and log + collection. If a job does not return by the default 10 sec 'wait_time_s' value, + it can be increased up to 86400 seconds. (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_job_submit - behavior changed when a volume is defined in the module options + such that it will catalog the data set if it is not cataloged and submit the + job. In the past, the function did not catalog the data set and instead performed + I/O operations and then submitted the job. This behavior aligns to other module + behaviors and reduces the possibility to encounter a permissions issue. (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_job_submit - was updated to include an additional error code condition + JCLERR. (https://github.com/ansible-collections/ibm_zos_core/pull/312) + - zos_lineinfile - updates the module with a new option named tmp_hlq. This + allows for a user to specify the data set high level qualifier (HLQ) used + in any temporary data set created by the module. Often, the defaults are not + permitted on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_mount - updates the module with a new option named tmp_hlq. This allows + for a user to specify the data set high level qualifier (HLQ) used in any + temporary data set created by the module. Often, the defaults are not permitted + on systems, this provides a way to override the defaults. 
(https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_mvs_raw - Ensures that temporary datasets created by DD Statements use + the tmp_hlq specified. This allows for a user to specify the data set high + level qualifier (HLQ) used in any temporary data set created by the module. + Often, the defaults are not permitted on systems, this provides a way to override + the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/414). + - zos_mvs_raw - updates the module with a new option named tmp_hlq. This allows + for a user to specify the data set high level qualifier (HLQ) used in any + temporary data set created by the module. Often, the defaults are not permitted + on systems, this provides a way to override the defaults. (https://github.com/ansible-collections/ibm_zos_core/pull/341). + - zos_operator - added in the response the cmd result (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - added in the response the elapsed time (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - added in the response the wait_time_s set (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - deprecated the wait option, not needed with wait_time_s minor_changes + (https://github.com/ansible-collections/ibm_zos_core/issues/389). + - zos_operator - was updated to remove the usage of REXX and replaced with ZOAU + python APIs. This reduces code replication and it removes the need for REXX + interpretation which increases performance. (https://github.com/ansible-collections/ibm_zos_core/pull/312). 
+ release_summary: "Release Date: '2022-11-17' This changelog describes all changes made to the modules and plugins included @@ -1626,68 +1633,68 @@ releases: the collections `release notes `__ - ' + " fragments: - - 303-correct-double-quotes.yml - - 312-boolean-condition.yml - - 312-update-code-to-zoau-1.2.0-api.yml - - 316-update-zos-blockinfile-force.yml - - 317-update-zos-blockinfile-prepend-spaces.yml - - 318-sdsf-dependency-removed.yml - - 321-fixes-deleting-src-on-encoding-error.yml - - 322-new-zos-gather-facts.yml - - 325-fixes-deleting-cataloged-data-set.yml - - 327-update-zos_copy-uss-to-pdse.yml - - 334-display-specific-ddname.yml - - 334-words-matter.yml - - 336-update-mvs_raw_no_dd_stmt.yml - - 341-update-collection-tmphlq.yml - - 345-update-encoding-options.yml - - 350-update-zos_fetch-vsam.yml - - 389-fixes-zos-operator-timeout-option.yml - - 393-tmphlq-dd-statements.yml - - 400-fixes-verbose-trailing-lines.yml - - 491-tmp-hlq-zos_data_set.yml - - 511-update-ccsid-type-int.yml - - 546-enable-zos_operator-func-test.yml - - 548-job-output-remove-workaround.yml - - 549-job-submit-remove-jcl-uss-patch.yml - - 550-trivial-mvs_zos_raw.yml - - 552-zos_job_submit-max-rc.yaml - - 588-zos_copy-emergenxy-backup.yml - - 599-copy-carriage-return.yml - - 600-zos_copy-special-characters.yml - - 601-copy-loadlib-member.yml - - 627-all-modules.yml - - 647-zos_data_set_record_format.yml - - 650-doc-meta-data-updates.yml - - v1.5.0-beta.1_summary.yml + - 303-correct-double-quotes.yml + - 312-boolean-condition.yml + - 312-update-code-to-zoau-1.2.0-api.yml + - 316-update-zos-blockinfile-force.yml + - 317-update-zos-blockinfile-prepend-spaces.yml + - 318-sdsf-dependency-removed.yml + - 321-fixes-deleting-src-on-encoding-error.yml + - 322-new-zos-gather-facts.yml + - 325-fixes-deleting-cataloged-data-set.yml + - 327-update-zos_copy-uss-to-pdse.yml + - 334-display-specific-ddname.yml + - 334-words-matter.yml + - 336-update-mvs_raw_no_dd_stmt.yml + - 
341-update-collection-tmphlq.yml + - 345-update-encoding-options.yml + - 350-update-zos_fetch-vsam.yml + - 389-fixes-zos-operator-timeout-option.yml + - 393-tmphlq-dd-statements.yml + - 400-fixes-verbose-trailing-lines.yml + - 491-tmp-hlq-zos_data_set.yml + - 511-update-ccsid-type-int.yml + - 546-enable-zos_operator-func-test.yml + - 548-job-output-remove-workaround.yml + - 549-job-submit-remove-jcl-uss-patch.yml + - 550-trivial-mvs_zos_raw.yml + - 552-zos_job_submit-max-rc.yaml + - 588-zos_copy-emergenxy-backup.yml + - 599-copy-carriage-return.yml + - 600-zos_copy-special-characters.yml + - 601-copy-loadlib-member.yml + - 627-all-modules.yml + - 647-zos_data_set_record_format.yml + - 650-doc-meta-data-updates.yml + - v1.5.0-beta.1_summary.yml modules: - - description: Gather z/OS system facts. - name: zos_gather_facts - namespace: '' - release_date: '2022-11-02' + - description: Gather z/OS system facts. + name: zos_gather_facts + namespace: "" + release_date: "2022-11-02" 1.6.0: changes: bugfixes: - - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM - data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). - - zos_copy - Encoding normalization used to handle newlines in text files was - applied to binary files too. Fix makes sure that binary files bypass this - normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) - - zos_copy - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). - - zos_copy - kept permissions on target directory when copy overwrote files. - The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/790) - - zos_data_set - Reported a failure caused when `present=absent` for a VSAM - data set leaving behind cluster components. 
Fix introduces a new logical flow - that will evaluate the volumes, compare it to the provided value and if necessary - catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/816). - - zos_fetch - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). - - zos_gather_facts - Fixes an issue in the zoau version checker which prevented - the zos_gather_facts module from running with newer versions of ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/797) - release_summary: 'Release Date: ''2023-06-23'' + - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM + data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/816). + - zos_copy - Encoding normalization used to handle newlines in text files was + applied to binary files too. Fix makes sure that binary files bypass this + normalization. (https://github.com/ansible-collections/ibm_zos_core/pull/810) + - zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). + - zos_copy - kept permissions on target directory when copy overwrote files. + The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/790) + - zos_data_set - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical flow + that will evaluate the volumes, compare it to the provided value and if necessary + catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/816). 
+ - zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/814). + - zos_gather_facts - Fixes an issue in the zoau version checker which prevented + the zos_gather_facts module from running with newer versions of ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/797) + release_summary: "Release Date: '2023-06-23' This changelog describes all changes made to the modules and plugins included @@ -1695,61 +1702,61 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 783_fix_zoau_version_checker.yml - - 790_overwrite_permissions_on_copy.yml - - 810_fix_binary_file_bypass.yml - - 813-ansible-lint.yml - - 814-zos_data_set-update-vsam-copy.yml - - 816-zos_data_set-update-vsam.yml - - v1.6.0_summary.yml - release_date: '2023-06-23' + - 783_fix_zoau_version_checker.yml + - 790_overwrite_permissions_on_copy.yml + - 810_fix_binary_file_bypass.yml + - 813-ansible-lint.yml + - 814-zos_data_set-update-vsam-copy.yml + - 816-zos_data_set-update-vsam.yml + - v1.6.0_summary.yml + release_date: "2023-06-23" 1.6.0-beta.1: changes: bugfixes: - - Fixed wrong error message when a USS source is not found, aligning with a - similar error message from zos_blockinfile "{src} does not exist". - - zos_blockinfile - was unable to use double quotes which prevented some use - cases and did not display an approriate message. The fix now allows for double - quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) - - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an - error while computing the record length for a new destination dataset. Issue - 664. 
(https://github.com/ansible-collections/ibm_zos_core/pull/743) - - zos_copy - Fixes a bug where the code for fixing an issue with newlines in - files (issue 599) would use the wrong encoding for normalization. Issue 678. - (https://github.com/ansible-collections/ibm_zos_core/pull/743) - - zos_encode - fixes a bug where converted files were not tagged afterwards - with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) - - zos_find - fixes a bug where find result values stopped being returned after - first value in a list was 'not found'. (https://github.com/ansible-collections/ibm_zos_core/pull/668) - - zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed - to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) + - Fixed wrong error message when a USS source is not found, aligning with a + similar error message from zos_blockinfile "{src} does not exist". + - zos_blockinfile - was unable to use double quotes which prevented some use + cases and did not display an appropriate message. The fix now allows for double + quotes to be used with the module. (https://github.com/ansible-collections/ibm_zos_core/pull/680) + - zos_copy - Fixes a bug where files not encoded in IBM-1047 would trigger an + error while computing the record length for a new destination dataset. Issue + 664. (https://github.com/ansible-collections/ibm_zos_core/pull/743) + - zos_copy - Fixes a bug where the code for fixing an issue with newlines in + files (issue 599) would use the wrong encoding for normalization. Issue 678. + (https://github.com/ansible-collections/ibm_zos_core/pull/743) + - zos_encode - fixes a bug where converted files were not tagged afterwards + with the new code set. (https://github.com/ansible-collections/ibm_zos_core/pull/534) + - zos_find - fixes a bug where find result values stopped being returned after + first value in a list was 'not found'. 
(https://github.com/ansible-collections/ibm_zos_core/pull/668) + - zos_lineinfile - Fixed a bug where a Python f-string was used and thus removed + to ensure support for Python 2.7 on the controller. (https://github.com/ansible-collections/ibm_zos_core/pull/659) major_changes: - - zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. - (https://github.com/ansible-collections/ibm_zos_core/pull/654) + - zos_volume_init - Introduces new module to handle volume (or minidisk) initialization. + (https://github.com/ansible-collections/ibm_zos_core/pull/654) minor_changes: - - Updated the text converter import from "from ansible.module_utils._text" to - "from ansible.module_utils.common.text.converters" to remove warning".. warn - Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) - - module_utils - job.py utility did not support positional wiled card placement, - this enhancement uses `fnmatch` logic to support wild cards. - - zos_copy - Fixed a bug where the module would change the mode for a directory - when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) - - zos_copy - was enhanced to keep track of modified members in a destination - dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) - - zos_data_set - add force parameter to enable member delete while pdse is in - use (https://github.com/ansible-collections/ibm_zos_core/pull/718). - - zos_job_query - ansible module does not support positional wild card placement - for `job_name1 or `job_id`. This enhancement allows embedded wildcards throughout - the `job_name` and `job_id`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/721) - - zos_lineinfile - would access data sets with exclusive access so no other - task can read the data, this enhancement allows for a data set to be opened - with a disposition set to share so that other tasks can access the data when - option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) - - zos_tso_command - was enhanced to accept `max_rc` as an option. This option - allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) - release_summary: 'Release Date: ''2023-04-26'' + - Updated the text converter import from "from ansible.module_utils._text" to + "from ansible.module_utils.common.text.converters" to remove warning".. warn + Use ansible.module_utils.common.text.converters instead.". (https://github.com/ansible-collections/ibm_zos_core/pull/602) + - module_utils - job.py utility did not support positional wild card placement, + this enhancement uses `fnmatch` logic to support wild cards. + - zos_copy - Fixed a bug where the module would change the mode for a directory + when copying into it the contents of another. (https://github.com/ansible-collections/ibm_zos_core/pull/723) + - zos_copy - was enhanced to keep track of modified members in a destination + dataset, restoring them to their previous state in case of a failure. (https://github.com/ansible-collections/ibm_zos_core/pull/551) + - zos_data_set - add force parameter to enable member delete while pdse is in + use (https://github.com/ansible-collections/ibm_zos_core/pull/718). + - zos_job_query - ansible module does not support positional wild card placement + for `job_name` or `job_id`. This enhancement allows embedded wildcards throughout + the `job_name` and `job_id`. 
(https://github.com/ansible-collections/ibm_zos_core/pull/721) + - zos_lineinfile - would access data sets with exclusive access so no other + task can read the data, this enhancement allows for a data set to be opened + with a disposition set to share so that other tasks can access the data when + option `force` is set to `true`. (https://github.com/ansible-collections/ibm_zos_core/pull/731) + - zos_tso_command - was enhanced to accept `max_rc` as an option. This option + allows a non-zero return code to succeed as a valid return code. (https://github.com/ansible-collections/ibm_zos_core/pull/666) + release_summary: "Release Date: '2023-04-26' This changelog describes all changes made to the modules and plugins included @@ -1757,34 +1764,34 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 309-replace-text-zos-encode.yml - - 323-zos-job-query-handle-multiple-wildcards.yml - - 358-zos-data-set-support-disposition-shr.yml - - 408-restore-members-on-failure.yml - - 417-can-quotes-in-content-can-be-supported.yml - - 574-zos_find_stoppedonnotfound.yml - - 584-zos_lineinfile-error-message.yml - - 602-text-converter-import.yml - - 619-Mode-set-for-files-is-applied-to-destination-directory.yml - - 654-new-module-zos_volume_init.yml - - 659-zos-lineinfile-f-string.yml - - 666-zos_tso_command_maxrc.yml - - 727-zos-blockinfile-examples.yml - - 731-zos_linefile-disposition_share.yaml - - 734-copy-loadlib-member-test-case.yml - - 740-zos_copy-volume-symbol-test.yml - - 743-zos_copy-encoding-bugs.yml - - v1.6.0-beta.1_summary.yml + - 309-replace-text-zos-encode.yml + - 323-zos-job-query-handle-multiple-wildcards.yml + - 358-zos-data-set-support-disposition-shr.yml + - 408-restore-members-on-failure.yml + - 417-can-quotes-in-content-can-be-supported.yml + - 574-zos_find_stoppedonnotfound.yml + - 584-zos_lineinfile-error-message.yml + - 
602-text-converter-import.yml + - 619-Mode-set-for-files-is-applied-to-destination-directory.yml + - 654-new-module-zos_volume_init.yml + - 659-zos-lineinfile-f-string.yml + - 666-zos_tso_command_maxrc.yml + - 727-zos-blockinfile-examples.yml + - 731-zos_linefile-disposition_share.yaml + - 734-copy-loadlib-member-test-case.yml + - 740-zos_copy-volume-symbol-test.yml + - 743-zos_copy-encoding-bugs.yml + - v1.6.0-beta.1_summary.yml modules: - - description: Initialize volumes or minidisks. - name: zos_volume_init - namespace: '' - release_date: '2023-04-26' + - description: Initialize volumes or minidisks. + name: zos_volume_init + namespace: "" + release_date: "2023-04-26" 1.7.0: changes: - release_summary: 'Release Date: ''2023-10-09'' + release_summary: "Release Date: '2023-10-09' This changelog describes all changes made to the modules and plugins included @@ -1792,75 +1799,75 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - v1.7.0_summary.yml - release_date: '2023-10-09' + - v1.7.0_summary.yml + release_date: "2023-10-09" 1.7.0-beta.1: changes: bugfixes: - - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM - data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). - - zos_blockinfile - Test case generate a data set that was not correctly removed. - Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) - - zos_copy - Module returned the dynamic values created with the same dataset - type and record format. Fix validate the correct dataset type and record format - of target created. 
(https://github.com/ansible-collections/ibm_zos_core/pull/824) - - zos_copy - Reported a false positive such that the response would have `changed=true` - when copying from a source (src) or destination (dest) data set that was in - use (DISP=SHR). This change now displays an appropriate error message and - returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). - - zos_copy - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). - - zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. - This change ensures proper test coverage for nested directories and file permissions. - (https://github.com/ansible-collections/ibm_zos_core/pull/806). - - zos_copy - Zos_copy did not encode inner content inside subdirectories once - the source was copied to the destination. Fix now encodes all content in a - source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). - - zos_copy - kept permissions on target directory when copy overwrote files. - The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) - - zos_data_set - Reported a failure caused when `present=absent` for a VSAM - data set leaving behind cluster components. Fix introduces a new logical flow - that will evaluate the volumes, compare it to the provided value and if necessary - catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). - - zos_fetch - Reported a warning about the use of _play_context.verbosity.This - change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). - - zos_job_output - Error message did not specify the job not found. 
Fix now - specifies the job_id or job_name being searched to ensure more information - is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) - - zos_operator - Reported a failure caused by unrelated error response. Fix - now gives a transparent response of the operator to avoid false negatives. - (https://github.com/ansible-collections/ibm_zos_core/pull/762). + - module_utils - data_set.py - Reported a failure caused when cataloging a VSAM + data set. Fix now corrects how VSAM data sets are cataloged. (https://github.com/ansible-collections/ibm_zos_core/pull/791). + - zos_blockinfile - Test case generate a data set that was not correctly removed. + Changes delete the correct data set not only member. (https://github.com/ansible-collections/ibm_zos_core/pull/840) + - zos_copy - Module returned the dynamic values created with the same dataset + type and record format. Fix validate the correct dataset type and record format + of target created. (https://github.com/ansible-collections/ibm_zos_core/pull/824) + - zos_copy - Reported a false positive such that the response would have `changed=true` + when copying from a source (src) or destination (dest) data set that was in + use (DISP=SHR). This change now displays an appropriate error message and + returns `changed=false`. (https://github.com/ansible-collections/ibm_zos_core/pull/794). + - zos_copy - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_copy - Test case for recursive encoding directories reported a UTF-8 failure. + This change ensures proper test coverage for nested directories and file permissions. + (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_copy - Zos_copy did not encode inner content inside subdirectories once + the source was copied to the destination. 
Fix now encodes all content in a + source directory, including subdirectories. (https://github.com/ansible-collections/ibm_zos_core/pull/772). + - zos_copy - kept permissions on target directory when copy overwrote files. + The fix now set permissions when mode is given. (https://github.com/ansible-collections/ibm_zos_core/pull/795) + - zos_data_set - Reported a failure caused when `present=absent` for a VSAM + data set leaving behind cluster components. Fix introduces a new logical flow + that will evaluate the volumes, compare it to the provided value and if necessary + catalog and delete. (https://github.com/ansible-collections/ibm_zos_core/pull/791). + - zos_fetch - Reported a warning about the use of _play_context.verbosity.This + change corrects the module action to prevent the warning message. (https://github.com/ansible-collections/ibm_zos_core/pull/806). + - zos_job_output - Error message did not specify the job not found. Fix now + specifies the job_id or job_name being searched to ensure more information + is given back to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/747) + - zos_operator - Reported a failure caused by unrelated error response. Fix + now gives a transparent response of the operator to avoid false negatives. + (https://github.com/ansible-collections/ibm_zos_core/pull/762). major_changes: - - zos_copy - Previously, backups were taken when force was set to false; whether - or not a user specified this operation which caused allocation issues with - space and permissions. This removes the automatic backup performed and reverts - to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) + - zos_copy - Previously, backups were taken when force was set to false; whether + or not a user specified this operation which caused allocation issues with + space and permissions. 
This removes the automatic backup performed and reverts + to the original logic in that backups must be initiated by the user. (https://github.com/ansible-collections/ibm_zos_core/pull/896) minor_changes: - - Add support for Jinja2 templates in zos_copy and zos_job_submit when using - local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) - - zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, - space_type and type in the return output when the destination data set does - not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) - - zos_data_set - record format = 'F' has been added to support 'fixed' block - records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) - - zos_job_output - zoau added 'program_name' to their field output starting - with v1.2.4. This enhancement checks for that version and passes the extra - column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) - - zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, - and queue_position to the return output when querying or submitting a job. - Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) - - zos_job_query - unnecessary calls were made to find a jobs DDs that incurred - unnecessary overhead. This change removes those resulting in a performance - increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) - - zos_job_query - zoau added 'program_name' to their field output starting with - v1.2.4. This enhancement checks for that version and passes the extra column - through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) - - zos_job_submit - zoau added 'program_name' to their field output starting - with v1.2.4. 
This enhancement checks for that version and passes the extra - column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) - release_summary: 'Release Date: ''2023-07-26'' + - Add support for Jinja2 templates in zos_copy and zos_job_submit when using + local source files. (https://github.com/ansible-collections/ibm_zos_core/pull/667) + - zos_copy - Adds block_size, record_format, record_length, space_primary, space_secondary, + space_type and type in the return output when the destination data set does + not exist and has to be created by the module. (https://github.com/ansible-collections/ibm_zos_core/pull/773) + - zos_data_set - record format = 'F' has been added to support 'fixed' block + records. This allows records that can use the entire block. (https://github.com/ansible-collections/ibm_zos_core/pull/821) + - zos_job_output - zoau added 'program_name' to their field output starting + with v1.2.4. This enhancement checks for that version and passes the extra + column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + - zos_job_query - Adds new fields job_class, svc_class, priority, asid, creation_datetime, + and queue_position to the return output when querying or submitting a job. + Available when using ZOAU v1.2.3 or greater. (https://github.com/ansible-collections/ibm_zos_core/pull/778) + - zos_job_query - unnecessary calls were made to find a jobs DDs that incurred + unnecessary overhead. This change removes those resulting in a performance + increase in job related queries. (https://github.com/ansible-collections/ibm_zos_core/pull/911) + - zos_job_query - zoau added 'program_name' to their field output starting with + v1.2.4. This enhancement checks for that version and passes the extra column + through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + - zos_job_submit - zoau added 'program_name' to their field output starting + with v1.2.4. 
This enhancement checks for that version and passes the extra + column through. (https://github.com/ansible-collections/ibm_zos_core/pull/841) + release_summary: "Release Date: '2023-07-26' This changelog describes all changes made to the modules and plugins included @@ -1868,56 +1875,56 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 439-add-f-recordtype.yml - - 667-template-support.yml - - 696-zos-copy-remove-emergency-backup.yml - - 747-failed_when_the_job_name_was_null_or_not_found.yaml - - 762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml - - 766-ac-command-replace-makefile.yml - - 771-update-ansible-version.yaml - - 772-Encode-files-recursively-and-test-case-for-keep-behavior.yml - - 773-return-dynamically-created-dest-attrs.yaml - - 778-query-new-fields.yml - - 789-ac-command-add-test.yml - - 789-ac-command-updates.yml - - 791-zos_data_set-update-vsam-copy.yml - - 794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml - - 795_overwrite_permissions_on_copy.yml - - 806-zos_copy_fetch-display-verbose.yml - - 812-ansible-lint.yml - - 824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml - - 839-Add-Field-to-zos-job-query.yml - - 840-redesign-test-cases.yml - - 880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml - - 906-update-doc-generation.yml - - 911-Improve-job-query-performance.yml - - v1.7.0-beta.1_summary.yml + - 439-add-f-recordtype.yml + - 667-template-support.yml + - 696-zos-copy-remove-emergency-backup.yml + - 747-failed_when_the_job_name_was_null_or_not_found.yaml + - 762-zos-operator-reported-failure-caused-by-unrelated-error-response.yaml + - 766-ac-command-replace-makefile.yml + - 771-update-ansible-version.yaml + - 772-Encode-files-recursively-and-test-case-for-keep-behavior.yml + - 773-return-dynamically-created-dest-attrs.yaml + - 
778-query-new-fields.yml + - 789-ac-command-add-test.yml + - 789-ac-command-updates.yml + - 791-zos_data_set-update-vsam-copy.yml + - 794-zos_copy_report_error_message_when_concurrent_copy_fails.yaml + - 795_overwrite_permissions_on_copy.yml + - 806-zos_copy_fetch-display-verbose.yml + - 812-ansible-lint.yml + - 824_Return_destination_attributes_had_hardcoded_type_and_recordformat.yml + - 839-Add-Field-to-zos-job-query.yml + - 840-redesign-test-cases.yml + - 880-Functional_tso_command_test_cases_currently_can_not_be_run_concurrently.yml + - 906-update-doc-generation.yml + - 911-Improve-job-query-performance.yml + - v1.7.0-beta.1_summary.yml modules: - - description: Archive files and data sets on z/OS. - name: zos_archive - namespace: '' - - description: Unarchive files and data sets in z/OS. - name: zos_unarchive - namespace: '' - release_date: '2023-07-26' + - description: Archive files and data sets on z/OS. + name: zos_archive + namespace: "" + - description: Unarchive files and data sets in z/OS. + name: zos_unarchive + namespace: "" + release_date: "2023-07-26" 1.7.0-beta.2: changes: bugfixes: - - zos_archive - Module did not return the proper src state after archiving. - Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_archive - Module did not return the proper src state after archiving. + Fix now displays the status of the src after the operation. (https://github.com/ansible-collections/ibm_zos_core/pull/930). minor_changes: - - zos_archive - If destination data set space is not provided then the module - computes it based on the src list and/or expanded src list based on pattern - provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - - zos_archive - When xmit faces a space error in xmit operation because of dest - or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). 
- - zos_unarchive - When copying to remote fails now a proper error message is - displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - - zos_unarchive - When copying to remote if space_primary is not defined, then - is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). - release_summary: 'Release Date: ''2023-08-21'' + - zos_archive - If destination data set space is not provided then the module + computes it based on the src list and/or expanded src list based on pattern + provided. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_archive - When xmit faces a space error in xmit operation because of dest + or log data set are filled raises an appropriate error hint. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_unarchive - When copying to remote fails now a proper error message is + displayed. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + - zos_unarchive - When copying to remote if space_primary is not defined, then + is defaulted to 5M. (https://github.com/ansible-collections/ibm_zos_core/pull/930). + release_summary: "Release Date: '2023-08-21' This changelog describes all changes made to the modules and plugins included @@ -1925,40 +1932,40 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 930-archive-post-beta.yml - - v1.7.0-beta.2_summary.yml - release_date: '2023-08-21' + - 930-archive-post-beta.yml + - v1.7.0-beta.2_summary.yml + release_date: "2023-08-21" 1.8.0: changes: bugfixes: - - zos_copy - When copying an executable data set from controller to managed - node, copy operation failed with an encoding error. Fix now avoids encoding - when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). 
- - zos_copy - When copying an executable data set with aliases and destination - did not exist, destination data set was created with wrong attributes. Fix - now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). - - zos_copy - When performing a copy operation to an existing file, the copied - file resulted in having corrupted contents. Fix now implements a workaround - to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). - - zos_operator - The module was ignoring the wait time argument. The module - now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). - - zos_operator_action_query - The module was ignoring the wait time argument. - The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). - - zos_unarchive - When zos_unarchive fails during unpack either with xmit or - terse it does not clean the temporary data sets created. Fix now removes the - temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). + - zos_copy - When copying an executable data set from controller to managed + node, copy operation failed with an encoding error. Fix now avoids encoding + when executable option is selected. (https://github.com/ansible-collections/ibm_zos_core/pull/1079). + - zos_copy - When copying an executable data set with aliases and destination + did not exist, destination data set was created with wrong attributes. Fix + now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1067). + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. 
Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1069). + - zos_operator - The module was ignoring the wait time argument. The module + now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_operator_action_query - The module was ignoring the wait time argument. + The module now passes the wait time argument to ZOAU. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_unarchive - When zos_unarchive fails during unpack either with xmit or + terse it does not clean the temporary data sets created. Fix now removes the + temporary data sets. (https://github.com/ansible-collections/ibm_zos_core/pull/1054). minor_changes: - - zos_job_submit - Previous code did not return output, but still requested - job data from the target system. This changes to honor return_output=false - by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). - - zos_script - Add support for remote_tmp from the Ansible configuration to - setup where temporary files will be created, replacing the module option tmp_path. - (https://github.com/ansible-collections/ibm_zos_core/pull/1068). - - zos_tso_command - Add example for executing explicitly a REXX script from - a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). - release_summary: 'Release Date: ''2023-12-08'' + - zos_job_submit - Previous code did not return output, but still requested + job data from the target system. This changes to honor return_output=false + by not querying the job dd segments at all. (https://github.com/ansible-collections/ibm_zos_core/pull/1063). + - zos_script - Add support for remote_tmp from the Ansible configuration to + setup where temporary files will be created, replacing the module option tmp_path. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1068). + - zos_tso_command - Add example for executing explicitly a REXX script from + a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1072). + release_summary: "Release Date: '2023-12-08' This changelog describes all changes made to the modules and plugins included @@ -1966,88 +1973,88 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1041-bug-zos-submit-job-honor-return-output-literally.yml - - 1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml - - 1049-xmit-temporary-data-sets.yml - - 1060-remote_tmp_zos_script.yml - - 1067-mvs_to_non_existent_mvs_copy_verify_destination_attrs_match.yml - - 1069-corrupted-second-copy.yml - - 1072-rexx-exec-tso_command.yml - - 1079-zos-copy-local-executable.yml - - 1089-update-managed_node_doc.yml - - v1.8.0-summary.yml - release_date: '2023-12-08' + - 1041-bug-zos-submit-job-honor-return-output-literally.yml + - 1043-bug-title-zos_operator-is-passing-wrong-value-to-zoauopercmd.yml + - 1049-xmit-temporary-data-sets.yml + - 1060-remote_tmp_zos_script.yml + - 1067-mvs_to_non_existent_mvs_copy_verify_destination_attrs_match.yml + - 1069-corrupted-second-copy.yml + - 1072-rexx-exec-tso_command.yml + - 1079-zos-copy-local-executable.yml + - 1089-update-managed_node_doc.yml + - v1.8.0-summary.yml + release_date: "2023-12-08" 1.8.0-beta.1: changes: bugfixes: - - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption - value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). - - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures - the deletion of files every time the module run. 
(https://github.com/ansible-collections/ibm_zos_core/pull/951) - - zos_job_submit - The last line of the jcl was missing in the input. Fix now - ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) - - zos_lineinfile - A duplicate entry was made even if line was already present - in the target file. Fix now prevents a duplicate entry if the line already - exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) - - zos_operator - The last line of the operator was missing in the response of - the module. The fix now ensures the presence of the full output of the operator. - https://github.com/ansible-collections/ibm_zos_core/pull/918) + - zos_copy - Update option limit to include LIBRARY as dest_dataset/suboption + value. Documentation updated to reflect this change. (https://github.com/ansible-collections/ibm_zos_core/pull/968). + - zos_job_submit - Temporary files were created in tmp directory. Fix now ensures + the deletion of files every time the module run. (https://github.com/ansible-collections/ibm_zos_core/pull/951) + - zos_job_submit - The last line of the jcl was missing in the input. Fix now + ensures the presence of the full input in job_submit. (https://github.com/ansible-collections/ibm_zos_core/pull/952) + - zos_lineinfile - A duplicate entry was made even if line was already present + in the target file. Fix now prevents a duplicate entry if the line already + exists in the target file. (https://github.com/ansible-collections/ibm_zos_core/pull/916) + - zos_operator - The last line of the operator was missing in the response of + the module. The fix now ensures the presence of the full output of the operator. + https://github.com/ansible-collections/ibm_zos_core/pull/918) deprecated_features: - - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). 
+ - zos_blockinfile debug - is deprecated in favor of 'as_json' (https://github.com/ansible-collections/ibm_zos_core/pull/904). known_issues: - - Several modules have reported UTF8 decoding errors when interacting with results - that contain non-printable UTF8 characters in the response. This occurs when - a module receives content that does not correspond to a UTF-8 value. These - include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` - but are not limited to this list. This will be addressed in `ibm_zos_core` - version 1.10.0-beta.1. Each case is unique, some options to work around the - error are below. - Specify that the ASA assembler option be enabled to instruct - the assembler to use ANSI control characters instead of machine code control - characters. - Add `ignore_errors:true` to the playbook task so the task error - will not fail the playbook. - If the error is resulting from a batch job, - add `ignore_errors:true` to the task and capture the output into a variable - and extract the job ID with a regular expression and then use `zos_job_output` - to display the DD without the non-printable character such as the DD `JESMSGLG`. - (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) - (https://github.com/ansible-collections/ibm_zos_core/issues/972) - - With later versions of `ansible-core` used with `ibm_zos_core` collection - a warning has started to appear "Module "ansible.builtin.command" returned - non UTF-8 data in the JSON response" that is currently being reviewed. There - are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) + - Several modules have reported UTF8 decoding errors when interacting with results + that contain non-printable UTF8 characters in the response. This occurs when + a module receives content that does not correspond to a UTF-8 value. 
These + include modules `zos_job_submit`, `zos_job_output`, `zos_operator_action_query` + but are not limited to this list. This will be addressed in `ibm_zos_core` + version 1.10.0-beta.1. Each case is unique, some options to work around the + error are below. - Specify that the ASA assembler option be enabled to instruct + the assembler to use ANSI control characters instead of machine code control + characters. - Add `ignore_errors:true` to the playbook task so the task error + will not fail the playbook. - If the error is resulting from a batch job, + add `ignore_errors:true` to the task and capture the output into a variable + and extract the job ID with a regular expression and then use `zos_job_output` + to display the DD without the non-printable character such as the DD `JESMSGLG`. + (https://github.com/ansible-collections/ibm_zos_core/issues/677) (https://github.com/ansible-collections/ibm_zos_core/issues/776) + (https://github.com/ansible-collections/ibm_zos_core/issues/972) + - With later versions of `ansible-core` used with `ibm_zos_core` collection + a warning has started to appear "Module "ansible.builtin.command" returned + non UTF-8 data in the JSON response" that is currently being reviewed. There + are no recommendations at this point. (https://github.com/ansible-collections/ibm_zos_core/issues/983) minor_changes: - - module_utils/template - Add validation into path joins to detect unauthorized - path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_archive - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_archive - Enhanced test cases to use test lines the same length of the - record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) - - zos_copy - Add validation into path joins to detect unauthorized path traversals. 
- (https://github.com/ansible-collections/ibm_zos_core/pull/962) - - zos_copy - Add new option `force_lock` that can copy into data sets that are - already in use by other processes (DISP=SHR). User needs to use with caution - because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). - - zos_copy - includes a new option `executable` that enables copying of executables - such as load modules or program objects to both USS and partitioned data sets. - When the `dest` option contains a non-existent data set, `zos_copy` will create - a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) - - zos_copy - introduces a new option 'aliases' to enable preservation of member - aliases when copying data to partitioned data sets (PDS) destinations from - USS or other PDS sources. Copying aliases of text based members to/from USS - is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) - - zos_fetch - Add validation into path joins to detect unauthorized path traversals. - (https://github.com/ansible-collections/ibm_zos_core/pull/962) - - zos_job_submit - Change action plugin call from copy to zos_copy. (https://github.com/ansible-collections/ibm_zos_core/pull/951) - - zos_operator - Changed system to call 'wait=true' parameter to zoau call. - Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_operator_action_query - Add a max delay of 5 seconds on each part of the - operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) - - zos_unarchive - Add validation into path joins to detect unauthorized path - traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) - - zos_unarchive - Enhanced test cases to use test lines the same length of the - record length. 
(https://github.com/ansible-collections/ibm_zos_core/pull/965) - release_summary: 'Release Date: ''2023-10-24'' + - module_utils/template - Add validation into path joins to detect unauthorized + path traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_archive - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_archive - Enhanced test cases to use test lines the same length of the + record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + - zos_copy - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_copy - Add new option `force_lock` that can copy into data sets that are + already in use by other processes (DISP=SHR). User needs to use with caution + because this is subject to race conditions and can lead to data loss. (https://github.com/ansible-collections/ibm_zos_core/pull/980). + - zos_copy - includes a new option `executable` that enables copying of executables + such as load modules or program objects to both USS and partitioned data sets. + When the `dest` option contains a non-existent data set, `zos_copy` will create + a data set with the appropriate attributes for an executable. (https://github.com/ansible-collections/ibm_zos_core/pull/804) + - zos_copy - introduces a new option 'aliases' to enable preservation of member + aliases when copying data to partitioned data sets (PDS) destinations from + USS or other PDS sources. Copying aliases of text based members to/from USS + is not supported. (https://github.com/ansible-collections/ibm_zos_core/pull/1014) + - zos_fetch - Add validation into path joins to detect unauthorized path traversals. + (https://github.com/ansible-collections/ibm_zos_core/pull/962) + - zos_job_submit - Change action plugin call from copy to zos_copy. 
(https://github.com/ansible-collections/ibm_zos_core/pull/951) + - zos_operator - Changed system to call 'wait=true' parameter to zoau call. + Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) + - zos_operator_action_query - Add a max delay of 5 seconds on each part of the + operator_action_query. Requires zoau 1.2.5 or later. (https://github.com/ansible-collections/ibm_zos_core/pull/976) + - zos_unarchive - Add validation into path joins to detect unauthorized path + traversals. (https://github.com/ansible-collections/ibm_zos_core/pull/1029) + - zos_unarchive - Enhanced test cases to use test lines the same length of the + record length. (https://github.com/ansible-collections/ibm_zos_core/pull/965) + release_summary: "Release Date: '2023-10-24' This changelog describes all changes made to the modules and plugins included @@ -2055,85 +2062,85 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1014-zos-copy-add-data-set-member-aliases.yml - - 1018-internal-consolidate-version-checks.yml - - 1029-validate-path-join.yml - - 1034-document-utf8-known-issue.yml - - 804-improved_load_module_and_program_object_support.yml - - 904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml - - 916-zos-lineinfile-does-not-behave-community.yml - - 918-zos-operator-response-come-back-truncate.yaml - - 920-zos-copy-add-library-choice.yml - - 934-Remove-conditional-unnecessary.yml - - 943-enhance-Add-wait-zos-operator-and-query.yml - - 951-Change-copy-for-zos-copy-and-remove-temporary-files.yml - - 952-zos-job-submit-truncate-final-character.yml - - 959-ac-tool-update-mounts.yml - - 963-validate-path-join.yml - - 964-modify-get_data_set_attributes-function.yml - - 965-enhance-archive-tests.yml - - 966-ac-tool-add-python-311-3.yml - - 969-Simplify_loadlib_test_cases.yml - - 980-zos-copy-disp-shr.yml - 
- v1.8.0-beta.1.yml + - 1014-zos-copy-add-data-set-member-aliases.yml + - 1018-internal-consolidate-version-checks.yml + - 1029-validate-path-join.yml + - 1034-document-utf8-known-issue.yml + - 804-improved_load_module_and_program_object_support.yml + - 904-Deprecate_debug_as_true_in_zos_blockinfile_and_set_json_as_true.yml + - 916-zos-lineinfile-does-not-behave-community.yml + - 918-zos-operator-response-come-back-truncate.yaml + - 920-zos-copy-add-library-choice.yml + - 934-Remove-conditional-unnecessary.yml + - 943-enhance-Add-wait-zos-operator-and-query.yml + - 951-Change-copy-for-zos-copy-and-remove-temporary-files.yml + - 952-zos-job-submit-truncate-final-character.yml + - 959-ac-tool-update-mounts.yml + - 963-validate-path-join.yml + - 964-modify-get_data_set_attributes-function.yml + - 965-enhance-archive-tests.yml + - 966-ac-tool-add-python-311-3.yml + - 969-Simplify_loadlib_test_cases.yml + - 980-zos-copy-disp-shr.yml + - v1.8.0-beta.1.yml modules: - - description: Run scripts in z/OS - name: zos_script - namespace: '' - release_date: '2023-10-24' + - description: Run scripts in z/OS + name: zos_script + namespace: "" + release_date: "2023-10-24" 1.9.0: changes: bugfixes: - - module_utils/job.py - job output containing non-printable characters would - crash modules. Fix now handles the error gracefully and returns a message - to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). - - zos_apf - When operation=list was selected and more than one data set entry - was fetched, the module only returned one data set. Fix now returns the complete - list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). - - zos_data_set - Fixes a small parsing bug in module_utils/data_set function - which extracts volume serial(s) from a LISTCAT command output. Previously - a leading '-' was left behind for volser strings under 6 chars. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1247). - - zos_job_submit - Was ignoring the default value for location=DATA_SET, now - when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). - - zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained - JCLERROR followed by an integer where the integer appeared to be a reason - code when actually it is a multi line marker used to coordinate errors spanning - more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned - for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - - zos_job_submit - when a response was returned, it contained an undocumented - property; ret_code[msg_text]. Now when a response is returned, it correctly - returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - - zos_job_submit - when typrun=copy was used in JCL it would fail the module - with an improper message and error condition. While this case continues to - be considered a failure, the message has been corrected and it fails under - the condition that not enough time has been added to the modules execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - - zos_job_submit - when typrun=hold was used in JCL it would fail the module - with an improper message and error condition. While this case continues to - be considered a failure, the message has been corrected and it fails under - the condition that not enough time has been added to the modules execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - - zos_job_submit - when typrun=jchhold was used in JCL it would fail the module - with an improper message and error condition. 
While this case continues to - be considered a failure, the message has been corrected and it fails under - the condition that not enough time has been added to the modules execution. - (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - - zos_job_submit - when typrun=scan was used in JCL, it would fail the module. - Now typrun=scan no longer fails the module and an appropriate message is returned - with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - - zos_job_submit - when wait_time_s was used, the duration would run approximately - 5 second longer than reported in the duration. Now the when duration is returned, - it is the actual accounting from when the job is submitted to when the module - reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - module_utils/job.py - job output containing non-printable characters would + crash modules. Fix now handles the error gracefully and returns a message + to the user inside `content` of the `ddname` that failed. (https://github.com/ansible-collections/ibm_zos_core/pull/1288). + - zos_apf - When operation=list was selected and more than one data set entry + was fetched, the module only returned one data set. Fix now returns the complete + list. (https://github.com/ansible-collections/ibm_zos_core/pull/1236). + - zos_data_set - Fixes a small parsing bug in module_utils/data_set function + which extracts volume serial(s) from a LISTCAT command output. Previously + a leading '-' was left behind for volser strings under 6 chars. (https://github.com/ansible-collections/ibm_zos_core/pull/1247). + - zos_job_submit - Was ignoring the default value for location=DATA_SET, now + when location is not specified it will default to DATA_SET. (https://github.com/ansible-collections/ibm_zos_core/pull/1120). 
+ - zos_job_submit - when a JCL error occurred, the ret_code[msg_code] contained + JCLERROR followed by an integer where the integer appeared to be a reason + code when actually it is a multi line marker used to coordinate errors spanning + more than one line. Now when a JCLERROR occurs, only the JCLERROR is returned + for property ret_code[msg_code]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when a response was returned, it contained an undocumented + property; ret_code[msg_text]. Now when a response is returned, it correctly + returns property ret_code[msg_txt]. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=copy was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=hold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=jchhold was used in JCL it would fail the module + with an improper message and error condition. While this case continues to + be considered a failure, the message has been corrected and it fails under + the condition that not enough time has been added to the modules execution. + (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when typrun=scan was used in JCL, it would fail the module. 
+ Now typrun=scan no longer fails the module and an appropriate message is returned + with appropriate return code values. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). + - zos_job_submit - when wait_time_s was used, the duration would run approximately + 5 second longer than reported in the duration. Now the when duration is returned, + it is the actual accounting from when the job is submitted to when the module + reads the job output. (https://github.com/ansible-collections/ibm_zos_core/pull/1283). major_changes: - - zos_job_submit - when job statuses were read, were limited to AC (active), - CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC - (security error), JCLERROR (job had a jcl error). Now the additional statuses - are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter - error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). - release_summary: 'Release Date: ''2024-03-11'' + - zos_job_submit - when job statuses were read, were limited to AC (active), + CC (completed normally), ABEND (ended abnormally) and ? (error unknown), SEC + (security error), JCLERROR (job had a jcl error). Now the additional statuses + are supported, CANCELLED (job was cancelled), CAB (converter abend), CNV (converter + error), SYS (system failure) and FLU (job was flushed). (https://github.com/ansible-collections/ibm_zos_core/pull/1283). 
+ release_summary: "Release Date: '2024-03-11' This changelog describes all changes made to the modules and plugins included @@ -2141,73 +2148,73 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1120-bugfix-zos_job_submit-default_value.yml - - 1236-bugfix-zos_apf-return-list.yml - - 1246-bugfix-zos_job_submit-typrun.yml - - 1247-volser-parsing-leading-dash-bugfix.yml - - 1288-job-submit-non-utf8-chars.yml - - 1292-doc-zos_tso_command-example.yml - - 1294-doc-zos_ping-scp.yml - - 1296-doc-sftp-collection-requirements.yml - - v1.9.0_summary.yml - release_date: '2024-03-16' + - 1120-bugfix-zos_job_submit-default_value.yml + - 1236-bugfix-zos_apf-return-list.yml + - 1246-bugfix-zos_job_submit-typrun.yml + - 1247-volser-parsing-leading-dash-bugfix.yml + - 1288-job-submit-non-utf8-chars.yml + - 1292-doc-zos_tso_command-example.yml + - 1294-doc-zos_ping-scp.yml + - 1296-doc-sftp-collection-requirements.yml + - v1.9.0_summary.yml + release_date: "2024-03-16" 1.9.0-beta.1: changes: bugfixes: - - zos_copy - When copying an executable data set with aliases and destination - did not exist, destination data set was created with wrong attributes. Fix - now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). - - zos_copy - When performing a copy operation to an existing file, the copied - file resulted in having corrupted contents. Fix now implements a workaround - to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). - - zos_job_output - When passing a job ID or name less than 8 characters long, - the module sent the full stack trace as the module's message. Change now allows - the use of a shorter job ID or name, as well as wildcards. 
(https://github.com/ansible-collections/ibm_zos_core/pull/1078). - - zos_job_query - The module handling ZOAU import errors obscured the original - traceback when an import error ocurred. Fix now passes correctly the context - to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - - zos_job_query - When passing a job ID or name less than 8 characters long, - the module sent the full stack trace as the module's message. Change now allows - the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - - zos_operator - The module handling ZOAU import errors obscured the original - traceback when an import error ocurred. Fix now passes correctly the context - to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). - - zos_unarchive - Using a local file with a USS format option failed when sending - to remote because dest_data_set option had an empty dictionary. Fix now leaves - dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). - - zos_unarchive - When unarchiving USS files, the module left temporary files - on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). + - zos_copy - When copying an executable data set with aliases and destination + did not exist, destination data set was created with wrong attributes. Fix + now creates destination data set with the same attributes as the source. (https://github.com/ansible-collections/ibm_zos_core/pull/1066). + - zos_copy - When performing a copy operation to an existing file, the copied + file resulted in having corrupted contents. Fix now implements a workaround + to not use the specific copy routine that corrupts the file contents. (https://github.com/ansible-collections/ibm_zos_core/pull/1064). 
+ - zos_job_output - When passing a job ID or name less than 8 characters long, + the module sent the full stack trace as the module's message. Change now allows + the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_query - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + - zos_job_query - When passing a job ID or name less than 8 characters long, + the module sent the full stack trace as the module's message. Change now allows + the use of a shorter job ID or name, as well as wildcards. (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_operator - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1042). + - zos_unarchive - Using a local file with a USS format option failed when sending + to remote because dest_data_set option had an empty dictionary. Fix now leaves + dest_data_set as None when using a USS format option. (https://github.com/ansible-collections/ibm_zos_core/pull/1045). + - zos_unarchive - When unarchiving USS files, the module left temporary files + on the remote. Change now removes temporary files. (https://github.com/ansible-collections/ibm_zos_core/pull/1073). minor_changes: - - zos_apf - Improves exception handling if there is a failure parsing the command - response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). - - zos_copy - Improve zos_copy performance when copying multiple members from - one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). 
- - zos_job_output - When passing a job ID and owner the module take as mutually - exclusive. Change now allows the use of a job ID and owner at the same time. - (https://github.com/ansible-collections/ibm_zos_core/pull/1078). - - zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). - - zos_job_submit - The module had undocumented parameter and uses as temporary - file when the location of the file is LOCAL. Change now uses the same name - as the src for the temporary file removing the addition of tmp_file to the - arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). - - zos_job_submit - The module handling ZOAU import errors obscured the original - traceback when an import error ocurred. Fix now passes correctly the context - to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). - - zos_mvs_raw - when using the dd_input content option for instream-data, if - the content was not properly indented according to the program which is generally - a blank in columns 1 & 2, those columns would be truncated. Now, when setting - instream-data, the module will ensure that all lines contain a blank in columns - 1 and 2 and add blanks when not present while retaining a maximum length of - 80 columns for any line. This is true for all content types; string, list - of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). - - zos_mvs_raw - no examples were included with the module that demonstrated - using a YAML block indicator, this now includes examples using a YAML block - indicator. - - zos_tso_command - add example for executing explicitly a REXX script from - a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). 
- release_summary: 'Release Date: ''2024-01-31'' + - zos_apf - Improves exception handling if there is a failure parsing the command + response when operation selected is list. (https://github.com/ansible-collections/ibm_zos_core/pull/1036). + - zos_copy - Improve zos_copy performance when copying multiple members from + one PDS/E to another. (https://github.com/ansible-collections/ibm_zos_core/pull/1176). + - zos_job_output - When passing a job ID and owner the module take as mutually + exclusive. Change now allows the use of a job ID and owner at the same time. + (https://github.com/ansible-collections/ibm_zos_core/pull/1078). + - zos_job_submit - Improve error messages in zos_job_submit to be clearer. (https://github.com/ansible-collections/ibm_zos_core/pull/1074). + - zos_job_submit - The module had undocumented parameter and uses as temporary + file when the location of the file is LOCAL. Change now uses the same name + as the src for the temporary file removing the addition of tmp_file to the + arguments. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + - zos_job_submit - The module handling ZOAU import errors obscured the original + traceback when an import error ocurred. Fix now passes correctly the context + to the user. (https://github.com/ansible-collections/ibm_zos_core/pull/1091). + - zos_mvs_raw - when using the dd_input content option for instream-data, if + the content was not properly indented according to the program which is generally + a blank in columns 1 & 2, those columns would be truncated. Now, when setting + instream-data, the module will ensure that all lines contain a blank in columns + 1 and 2 and add blanks when not present while retaining a maximum length of + 80 columns for any line. This is true for all content types; string, list + of strings and when using a YAML block indicator. (https://github.com/ansible-collections/ibm_zos_core/pull/1057). 
+ - zos_mvs_raw - no examples were included with the module that demonstrated + using a YAML block indicator, this now includes examples using a YAML block + indicator. + - zos_tso_command - add example for executing explicitly a REXX script from + a data set. (https://github.com/ansible-collections/ibm_zos_core/pull/1065). + release_summary: "Release Date: '2024-01-31' This changelog describes all changes made to the modules and plugins included @@ -2215,34 +2222,34 @@ releases: For additional details such as required dependencies and availability review - the collections `release notes `__' + the collections `release notes `__" fragments: - - 1016-remove-randint.yml - - 1036-apf-try-except.yml - - 1042-missing-zoau-imports.yml - - 1045-local-uss-unarchive.yml - - 1048-Update_sanity_tests_ignore.yml - - 1048-update-ac-tool-pyyaml-version.yml - - 1051-try-except-pass-zos_mvs_raw.yml - - 1052-try-except-pass-dd-statement.yml - - 1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml - - 1055-remove-subprocess-encode.yml - - 1056-Update_sanity_ignore_2_16.yml - - 1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml - - 1064-corruped-second-copy.yml - - 1065-rexx-exec-tso_command.yml - - 1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml - - 1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml - - 1074-improve-job-submit-error-msgs.yml - - 1077-modify-uss-extraction.yml - - 1078-short_job_name_sends_back_a_value_error.yaml - - 1091-Update_undocumented_argument_and_import_exception.yml - - 1101-fix-undefined-var.yml - - 1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml - - 1163-Refactor_calls_to_use_new_alias_and_execute_options.yml - - 1176-copy-members.yml - - 1195-Add_prefer_volumes_user.yml - - 1200-zos_backup_restore-sanity-issues.yml - - 977-remove-hard-coded-vols-and-datasets.yml - - v1.9.0-beta.1_summary.yml - release_date: '2024-02-01' + - 1016-remove-randint.yml + - 
1036-apf-try-except.yml + - 1042-missing-zoau-imports.yml + - 1045-local-uss-unarchive.yml + - 1048-Update_sanity_tests_ignore.yml + - 1048-update-ac-tool-pyyaml-version.yml + - 1051-try-except-pass-zos_mvs_raw.yml + - 1052-try-except-pass-dd-statement.yml + - 1053-Enabler_1002_test_collections_on_ansible_core_2_16.yml + - 1055-remove-subprocess-encode.yml + - 1056-Update_sanity_ignore_2_16.yml + - 1057-module_zos_mvs_raw_errors_with_long_multi_line_quoted_string_in_content_field.yml + - 1064-corruped-second-copy.yml + - 1065-rexx-exec-tso_command.yml + - 1066-Mvs_to_non_existent_mvs_copy_destination_attrs_match_up.yml + - 1073-action_plugin_does_not_clean_up_remote_temporary_files_after_completion.yml + - 1074-improve-job-submit-error-msgs.yml + - 1077-modify-uss-extraction.yml + - 1078-short_job_name_sends_back_a_value_error.yaml + - 1091-Update_undocumented_argument_and_import_exception.yml + - 1101-fix-undefined-var.yml + - 1158-Test-zos-fetch-find-remove-hardcoded-datasets.yml + - 1163-Refactor_calls_to_use_new_alias_and_execute_options.yml + - 1176-copy-members.yml + - 1195-Add_prefer_volumes_user.yml + - 1200-zos_backup_restore-sanity-issues.yml + - 977-remove-hard-coded-vols-and-datasets.yml + - v1.9.0-beta.1_summary.yml + release_date: "2024-02-01" diff --git a/changelogs/fragments/2204-Update_zos_job_query_module.yml b/changelogs/fragments/2204-Update_zos_job_query_module.yml new file mode 100644 index 0000000000..b3a2270c37 --- /dev/null +++ b/changelogs/fragments/2204-Update_zos_job_query_module.yml @@ -0,0 +1,3 @@ +breaking_changes: + - zos_job_query - Return field ``message`` is deprecated in favor of ``msg``. Return field ``steps`` is no longer under ``ret_code`` but is now included under ``jobs``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2204). 
\ No newline at end of file diff --git a/changelogs/fragments/2208_update_job_modules_interfaces.yml b/changelogs/fragments/2208_update_job_modules_interfaces.yml new file mode 100644 index 0000000000..e84fc8d9cf --- /dev/null +++ b/changelogs/fragments/2208_update_job_modules_interfaces.yml @@ -0,0 +1,12 @@ +breaking_changes: + - zos_job_output - Option name ``ddname`` is substituted for ``dd_name``, but ``ddname`` is kept as an alias. Return value ``changed`` is always returned and return value ``steps`` is no longer under ``ret_code`` + but is now included under `jobs`. Return value ``ddnames`` is replaced by ``dds`` and value ``ddname`` under ``ddnames`` is replaced by ``dd_name``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2208). + - zos_job_submit - Interface value ``location`` is replaced by ``remote_src`` in a bool value and ``wait_time_s`` is replaced by ``wait_time``. + Return value ``ddnames`` is replaced by ``dds`` and value ``ddname`` under ``ddnames`` is replaced by ``dd_name``. Now all values are returned under ``jobs`` except ``changed``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2208). + +trivial: + - test/zos_job_query_func.py - Update test validation to new values for job submit. Return value ``steps`` is no longer under `ret_code` + but is now included under `jobs`. + (https://github.com/ansible-collections/ibm_zos_core/pull/2208). 
\ No newline at end of file diff --git a/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml b/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml similarity index 97% rename from changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml rename to changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml index d1bf0fdad0..aa9ff7330d 100644 --- a/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml +++ b/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml @@ -1,10 +1,10 @@ minor_changes: - zos_data_set - Adds `noscratch` option to allow uncataloging a data set without deleting it from the volume's VTOC. - (https://github.com/ansible-collections/ibm_zos_core/pull/2202) + (https://github.com/ansible-collections/ibm_zos_core/pull/2210) trivial: - data_set - Internal updates to support the noscratch option. - https://github.com/ansible-collections/ibm_zos_core/pull/2202) + https://github.com/ansible-collections/ibm_zos_core/pull/2210) - test_zos_data_set_func - added test case to verify the `noscratch` option functionality in zos_data_set module. - (https://github.com/ansible-collections/ibm_zos_core/pull/2202). + (https://github.com/ansible-collections/ibm_zos_core/pull/2210). diff --git a/changelogs/fragments/2215-test-case-conditional-failure-2-19.yml b/changelogs/fragments/2215-test-case-conditional-failure-2-19.yml new file mode 100644 index 0000000000..64d8bea6d2 --- /dev/null +++ b/changelogs/fragments/2215-test-case-conditional-failure-2-19.yml @@ -0,0 +1,19 @@ +minor_changes: + - zos_job_output - Adds support to query SYSIN DDs from a job with new option input. + (https://github.com/ansible-collections/ibm_zos_core/pull/2215) +trivial: + - test_zos_copy_func.py - modified test case `test_job_script_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). 
+ + - test_zos_job_submit_func.py - modified test case `test_job_submit_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). + + - test_zos_script_func.py - modified test case `test_job_script_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). + + - test_zos_unarchive_func.py - modified test case `test_zos_unarchive_async` + to resolve porting issues to ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2215). \ No newline at end of file diff --git a/changelogs/fragments/2218-zos_operator_action_query-interface-update.yml b/changelogs/fragments/2218-zos_operator_action_query-interface-update.yml new file mode 100644 index 0000000000..f26747e77f --- /dev/null +++ b/changelogs/fragments/2218-zos_operator_action_query-interface-update.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_job_query - Option ``message_filter`` is deprecated in favor of ``msg_filter``. Option ``message_id`` is deprecated in favor of ``msg_id``. + Return value ``message_id`` is deprecated in favor of ``msg_id``. Return value ``message_text`` is deprecated in favor of ``msg_txt``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2218). \ No newline at end of file diff --git a/changelogs/fragments/2228-zos_encode-interface-update.yml b/changelogs/fragments/2228-zos_encode-interface-update.yml new file mode 100644 index 0000000000..6988482e1d --- /dev/null +++ b/changelogs/fragments/2228-zos_encode-interface-update.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_encode - Adds new return value ``encoding`` with ``from`` and ``to`` encoding values used in the operation. + (https://github.com/ansible-collections/ibm_zos_core/pull/2228). 
\ No newline at end of file diff --git a/changelogs/fragments/2230_zos_operator_interface_update.yml b/changelogs/fragments/2230_zos_operator_interface_update.yml new file mode 100644 index 0000000000..50c1e6c1e5 --- /dev/null +++ b/changelogs/fragments/2230_zos_operator_interface_update.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_operator - Option ``wait_time_s`` is being deprecated in favor of ``wait_time``. New option ``time_unit`` is being added to select + seconds or centiseconds. New return value ``time_unit`` is being added. Return value ``wait_time_s`` is being deprecated in favor of ``wait_time``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2230). \ No newline at end of file diff --git a/changelogs/fragments/2231-zos_fetch-interface-update.yml b/changelogs/fragments/2231-zos_fetch-interface-update.yml new file mode 100644 index 0000000000..4f603783f7 --- /dev/null +++ b/changelogs/fragments/2231-zos_fetch-interface-update.yml @@ -0,0 +1,3 @@ +breaking_changes: + - zos_fetch - Return value ``file`` is replaced by ``src``. Return value ``note`` is deprecated, the messages thrown in ``note`` are now returned in ``msg``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2231). diff --git a/changelogs/fragments/2232_Update_zos_copy_interface.yml b/changelogs/fragments/2232_Update_zos_copy_interface.yml new file mode 100644 index 0000000000..2e850186e4 --- /dev/null +++ b/changelogs/fragments/2232_Update_zos_copy_interface.yml @@ -0,0 +1,6 @@ +breaking_changes: + - zos_copy - Option ``force_lock`` is deprecated in favor of ``force`` for using datasets on dsp=shr. + Option ``force`` is deprecated in favor of ``replace`` for cases where you want to replace a dest that already exists. + Option ``executable`` is deprecated in favor of ``is_executable``. + Now return value ``dest_created`` is always returned with a bool value. + (https://github.com/ansible-collections/ibm_zos_core/pull/2232). 
\ No newline at end of file diff --git a/changelogs/fragments/2238-space-type-default-zos_backup_restore.yml b/changelogs/fragments/2238-space-type-default-zos_backup_restore.yml new file mode 100644 index 0000000000..af8e226e28 --- /dev/null +++ b/changelogs/fragments/2238-space-type-default-zos_backup_restore.yml @@ -0,0 +1,4 @@ +bugfixes: + - zos_backup_restore - Module documentation stated that default ``space_type`` for a backup was ``m`` but module would use bytes instead. + Fix now uses the correct default space type. + (https://github.com/ansible-collections/ibm_zos_core/pull/2238). diff --git a/changelogs/fragments/2239_Update_zos_replace_documentation.yml b/changelogs/fragments/2239_Update_zos_replace_documentation.yml new file mode 100644 index 0000000000..55d0b83bcf --- /dev/null +++ b/changelogs/fragments/2239_Update_zos_replace_documentation.yml @@ -0,0 +1,3 @@ +trivial: + - zos_replace_func.py - Update documentation adding default values and add verbosity for after, before and literal options. + (https://github.com/ansible-collections/ibm_zos_core/pull/2239). \ No newline at end of file diff --git a/changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml b/changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml new file mode 100644 index 0000000000..baabe1ee97 --- /dev/null +++ b/changelogs/fragments/2240-fix2.18-zos_zfs_resize.yml @@ -0,0 +1,4 @@ +trivial: + - test_zos_zfs_resize_func.py - modified test case `test_grow_n_shrink_operations_trace_ds_not_created` and `test_grow_n_shrink_operations_trace_ds` + to resolve issues when testing ansible 2.18. + (https://github.com/ansible-collections/ibm_zos_core/pull/2240). 
 diff --git a/changelogs/fragments/2243-zos_blockinfile-interface-update.yml b/changelogs/fragments/2243-zos_blockinfile-interface-update.yml new file mode 100644 index 0000000000..257804db29 --- /dev/null +++ b/changelogs/fragments/2243-zos_blockinfile-interface-update.yml @@ -0,0 +1,4 @@ +minor_changes: + - zos_blockinfile - Adds alias ``after`` for option ``insertafter`` and alias ``before`` for option ``insertbefore``. + Adds new return values ``stdout_lines`` and ``stderr_lines`` that returns any outstanding stdout or stderr message split in lines. + (https://github.com/ansible-collections/ibm_zos_core/pull/2243). diff --git a/changelogs/fragments/2244-zos_mount-interface-update.yml b/changelogs/fragments/2244-zos_mount-interface-update.yml new file mode 100644 index 0000000000..3cd516e7ad --- /dev/null +++ b/changelogs/fragments/2244-zos_mount-interface-update.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_mount - Option ``persistent.data_store`` is deprecated in favor of ``persistent.name``. + Option ``persistent.comment`` is deprecated in favor of ``persistent.marker``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2244). \ No newline at end of file diff --git a/changelogs/fragments/2245-zos_tso_command-interface-update.yml b/changelogs/fragments/2245-zos_tso_command-interface-update.yml new file mode 100644 index 0000000000..334467c579 --- /dev/null +++ b/changelogs/fragments/2245-zos_tso_command-interface-update.yml @@ -0,0 +1,9 @@ +breaking_changes: + - zos_tso_command - Return value ``content`` is replaced by ``stdout_lines``. + Return value ``lines`` is replaced by ``line_count``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2245). + +minor_changes: + - zos_tso_command - New return value ``stdout`` contains the command stdout in raw format. + New return value ``stderr_lines`` that returns any outstanding stderr message split in lines. + (https://github.com/ansible-collections/ibm_zos_core/pull/2245). 
\ No newline at end of file diff --git a/changelogs/fragments/2250-Update_zos_apf_interface.yml b/changelogs/fragments/2250-Update_zos_apf_interface.yml new file mode 100644 index 0000000000..c6acc832ba --- /dev/null +++ b/changelogs/fragments/2250-Update_zos_apf_interface.yml @@ -0,0 +1,3 @@ +breaking_changes: + - zos_apf - Option ``data_set_name`` is deprecated in favor of ``target``. New return values ``stdout_lines`` and ``stderr_lines`` are now returned. + (https://github.com/ansible-collections/ibm_zos_core/pull/2250). \ No newline at end of file diff --git a/changelogs/fragments/2253-zos_lineinfile-interface-update.yml b/changelogs/fragments/2253-zos_lineinfile-interface-update.yml new file mode 100644 index 0000000000..b7131c96b7 --- /dev/null +++ b/changelogs/fragments/2253-zos_lineinfile-interface-update.yml @@ -0,0 +1,12 @@ +breaking_changes: + - zos_lineinfile - Removes return value ``return_content`` in favor of ``stdout``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2253). + +minor_changes: + - zos_lineinfile - Adds alias ``after`` for option ``insertafter`` and alias ``before`` for option ``insertbefore``. + Adds new return values ``stdout_lines`` and ``stderr_lines`` that returns any outstanding stdout or stderr message split in lines. + (https://github.com/ansible-collections/ibm_zos_core/pull/2253). + +trivial: + - test_zos_lineinfile_func.py - Added a proper cleanup of a temporary file. + (https://github.com/ansible-collections/ibm_zos_core/pull/2253). \ No newline at end of file diff --git a/changelogs/fragments/2254-data_set-Enhance-error-message.yml b/changelogs/fragments/2254-data_set-Enhance-error-message.yml new file mode 100644 index 0000000000..64b5d2c6a1 --- /dev/null +++ b/changelogs/fragments/2254-data_set-Enhance-error-message.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_data_set - Enhances error messages when creating a Generation Data Group fails. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2254) diff --git a/changelogs/fragments/2256-zos_archive-interface-update.yml b/changelogs/fragments/2256-zos_archive-interface-update.yml new file mode 100644 index 0000000000..bd8c76bfe3 --- /dev/null +++ b/changelogs/fragments/2256-zos_archive-interface-update.yml @@ -0,0 +1,13 @@ +breaking_changes: + - zos_archive - Option ``format.format_options`` is deprecated in favor of ``format.options``. + Option ``format.format_options.name`` is deprecated in favor of ``format.options.type``. + Option ``format.format_options.use_adrdssu`` is deprecated in favor of ``format.options.use_adrdssu``. + Option ``format.format_options.terse_pack`` is deprecated in favor of ``format.options.spack`` as a new boolean option. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). + +trivial: + - test_zos_find_func.py - modified test case test_find_gdg_and_nonvsam_data_sets condition to make it less fragile in case there + are other data sets with the same HLQ in the system that are not part of the test. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). + - zos_apf - Modified alias deprecation version from 2.0 to 3.0. + (https://github.com/ansible-collections/ibm_zos_core/pull/2256). \ No newline at end of file diff --git a/changelogs/fragments/2259-add-sphere-zos_backup_restore.yml b/changelogs/fragments/2259-add-sphere-zos_backup_restore.yml new file mode 100644 index 0000000000..f049134247 --- /dev/null +++ b/changelogs/fragments/2259-add-sphere-zos_backup_restore.yml @@ -0,0 +1,8 @@ +minor_changes: + - zos_backup_restore - Adds ``compress`` option to enable compression of partitioned data sets using hardware compression if available. + Adds ``terse`` option to modify the behavior of executing an AMATERSE step to compress the temporary data set for the backup. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2259) + - zos_backup_restore - Adds ``index`` that allows for the backup and restore of all the associated + alternate index (AIX®) clusters and paths of a VSAM. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259) + diff --git a/changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml b/changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml new file mode 100644 index 0000000000..243ece7ba2 --- /dev/null +++ b/changelogs/fragments/2259-reduce-test-dep-zos_data_set.yml @@ -0,0 +1,13 @@ +trivial: + - test_zos_backup_restore.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_blockinfile_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_job_output_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_job_query_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_mount_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). + - test_zos_volume_init_func.py - Removed calls to zos_data_set that would increase the test case dependency between the modules. + (https://github.com/ansible-collections/ibm_zos_core/pull/2259). 
diff --git a/changelogs/fragments/2261-update_zos_find_interface.yml b/changelogs/fragments/2261-update_zos_find_interface.yml new file mode 100644 index 0000000000..777a0ac3b2 --- /dev/null +++ b/changelogs/fragments/2261-update_zos_find_interface.yml @@ -0,0 +1,4 @@ +breaking_changes: + - zos_find - Option value ``pds_pattern`` is deprecated. Previously, excluding members required adding pds_patterns. + The updated behavior now allows excluding members by placing them inside parentheses. + (https://github.com/ansible-collections/ibm_zos_core/pull/2261). \ No newline at end of file diff --git a/changelogs/fragments/2266-port-2255-2251-into-dev.yml b/changelogs/fragments/2266-port-2255-2251-into-dev.yml new file mode 100644 index 0000000000..e8b0648f8b --- /dev/null +++ b/changelogs/fragments/2266-port-2255-2251-into-dev.yml @@ -0,0 +1,17 @@ +minor_changes: + - zos_data_set - Enhances error messages when deleting a Generation Data Group fails. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266) + +trivial: + - zos_job_output - renamed the input parameter to be called as sysin_dd. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). + + - test_zos_job_output_func - modified the test case 'test_zos_job_output_job_exists_with_sysin' + to use sysin_dd instead of input. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). + + - test_zos_zfs_resize_func - Added a retry for autoincrease test in case that would fail. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). + + - test_zos_find_func - Updated an assertion condition that allows for more data sets to reside in the testing machine. + (https://github.com/ansible-collections/ibm_zos_core/pull/2266). 
\ No newline at end of file diff --git a/changelogs/fragments/2271-update_zos_unarchive_interface.yml b/changelogs/fragments/2271-update_zos_unarchive_interface.yml new file mode 100644 index 0000000000..10c50e8a5e --- /dev/null +++ b/changelogs/fragments/2271-update_zos_unarchive_interface.yml @@ -0,0 +1,6 @@ +breaking_changes: + - zos_unarchive - Option ``format.format_options`` is deprecated in favor of ``format.options``. + Option ``format.format_options.name`` is deprecated in favor of ``format.options.type``. + Option ``format.format_options.use_adrdssu`` is deprecated in favor of ``format.options.use_adrdssu``. + Return values ``stdout_lines`` and ``stderr_lines`` is added. + (https://github.com/ansible-collections/ibm_zos_core/pull/2271). \ No newline at end of file diff --git a/changelogs/fragments/2272-update-interface-zos_mount.yml b/changelogs/fragments/2272-update-interface-zos_mount.yml new file mode 100644 index 0000000000..3babd6d11a --- /dev/null +++ b/changelogs/fragments/2272-update-interface-zos_mount.yml @@ -0,0 +1,8 @@ +breaking_changes: + - zos_mount - Return values ``persistent.addDataset`` and ``persistent.delDataset`` are deprecated in favor of ``persistent.name`` and + ``persistent.state``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2272). +minor_changes: + - zos_mount - New return value ``persistent.state`` that returns the entry state in the persistent data set. + New return value ``persistent.name`` that returns the persistent data set name. + (https://github.com/ansible-collections/ibm_zos_core/pull/2272). 
 diff --git a/changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml b/changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml new file mode 100644 index 0000000000..b8ed2147b5 --- /dev/null +++ b/changelogs/fragments/2276-Add_special_words_bypass_disable_mng_strg_groups.yml @@ -0,0 +1,10 @@ +breaking_changes: + - zos_backup_restore - Add new dictionary option ``sms`` to specify how to interact with the storage class. + Option ``sms_storage_class`` is deprecated in favor of ``storage_class`` and is under new option ``sms`` being sms.storage_class. + Option ``sms_management_class`` is deprecated in favor of ``management_class`` and is under new option ``sms`` being sms.management_class. + (https://github.com/ansible-collections/ibm_zos_core/pull/2276) +minor_changes: + - zos_backup_restore - Adds ``disable_automatic_class`` to specify that automatic class selection routines will not be used to determine classes for the provided list. + Adds ``disable_automatic_storage_class`` to specify the automatic class selection routines will not be used to determine the source data set storage class. + Adds ``disable_automatic_management_class`` to specify the automatic class selection routines will not be used to determine the source data set management class. + (https://github.com/ansible-collections/ibm_zos_core/pull/2276) \ No newline at end of file diff --git a/changelogs/fragments/2290-update-verb-options.yml b/changelogs/fragments/2290-update-verb-options.yml new file mode 100644 index 0000000000..c632d97fd7 --- /dev/null +++ b/changelogs/fragments/2290-update-verb-options.yml @@ -0,0 +1,5 @@ +breaking_changes: + - zos_fetch - Option ``is_binary`` is replaced by ``binary``. + (https://github.com/ansible-collections/ibm_zos_core/pull/2290). + - zos_copy - Option ``is_binary`` is replaced by ``binary``. Option ``is_executable`` is renamed to ``executable``. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/2290). \ No newline at end of file diff --git a/changelogs/fragments/2320-support_administrator_share_words.yml b/changelogs/fragments/2320-support_administrator_share_words.yml new file mode 100644 index 0000000000..fee3194658 --- /dev/null +++ b/changelogs/fragments/2320-support_administrator_share_words.yml @@ -0,0 +1,6 @@ +minor_changes: + - zos_backup_restore - Adds ``access`` to specify how the module will access data sets and z/OS UNIX files when performing a backup or restore operation. + Adds ``share`` to specify the module allow data set read access to other programs while backing up or restoring. + Adds ``auth`` allows you to act as an administrator, where it will disable checking the current users privileges for z/OS UNIX files, data sets and + catalogs. + (https://github.com/ansible-collections/ibm_zos_core/pull/2320) \ No newline at end of file diff --git a/docs/source/modules/zos_apf.rst b/docs/source/modules/zos_apf.rst index 215de08519..8d06c883df 100644 --- a/docs/source/modules/zos_apf.rst +++ b/docs/source/modules/zos_apf.rst @@ -106,7 +106,7 @@ tmp_hlq persistent - Add/remove persistent entries to or from *data_set_name* + Add/remove persistent entries to or from *target* ``library`` will not be persisted or removed if ``persistent=None`` @@ -114,7 +114,7 @@ persistent | **type**: dict - data_set_name + target The data set name used for persisting or removing a ``library`` from the APF list. | **required**: True @@ -138,7 +138,7 @@ persistent backup - Creates a backup file or backup data set for *data_set_name*, including the timestamp information to ensure that you retrieve the original APF list defined in *data_set_name*". + Creates a backup file or backup data set for *target*, including the timestamp information to ensure that you retrieve the original APF list defined in *target*". *backup_name* can be used to specify a backup file name if *backup=true*. 
@@ -152,7 +152,7 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_set_name* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. + If the source *target* is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. If the source is an MVS data set, the backup_name must be an MVS data set name. @@ -241,18 +241,18 @@ Examples library: SOME.SEQUENTIAL.DATASET force_dynamic: true persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence zos_apf: state: absent library: SOME.SEQUENTIAL.DATASET volume: T12345 persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Batch libraries with custom marker, persistence for the APF list zos_apf: persistent: - data_set_name: "SOME.PARTITIONED.DATASET(MEM)" + target: "SOME.PARTITIONED.DATASET(MEM)" marker: "/* {mark} PROG001 USR0010 */" batch: - library: SOME.SEQ.DS1 @@ -304,6 +304,12 @@ stdout | **returned**: always | **type**: str +stdout_lines + List of strings containing individual lines from STDOUT. + + | **returned**: always + | **type**: list + stderr The error messages from ZOAU command apfadm @@ -311,6 +317,19 @@ stderr | **type**: str | **sample**: BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list. +stderr_lines + List of strings containing individual lines from STDERR. + + | **returned**: always + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list." 
+ ] + rc The return code from ZOAU command apfadm diff --git a/docs/source/modules/zos_archive.rst b/docs/source/modules/zos_archive.rst index 2a51654019..2eb8c67079 100644 --- a/docs/source/modules/zos_archive.rst +++ b/docs/source/modules/zos_archive.rst @@ -55,7 +55,7 @@ format | **type**: dict - name + type The compression format to use. | **required**: False @@ -64,15 +64,15 @@ format | **choices**: bz2, gz, tar, zip, terse, xmit, pax - format_options + options Options specific to a compression format. | **required**: False | **type**: dict - terse_pack - Compression option for use with the terse format, *name=terse*. + spack + Compression option for use with the terse format, *type=terse*. Pack will compress records in a data set so that the output results in lossless data compression. @@ -81,8 +81,8 @@ format Spack will produce smaller output and take approximately 3 times longer than pack compression. | **required**: False - | **type**: str - | **choices**: pack, spack + | **type**: bool + | **default**: True xmit_log_data_set @@ -98,7 +98,7 @@ format | **type**: str - use_adrdssu + adrdssu If set to true, the ``zos_archive`` module will use Data Facility Storage Management Subsystem data set services (DFSMSdss) program ADRDSSU to compress data sets into a portable format before using ``xmit`` or ``terse``. 
| **required**: False @@ -407,7 +407,7 @@ Examples src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: - name: tar + type: tar # Archive multiple files - name: Archive list of files into a zip @@ -417,7 +417,7 @@ Examples - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip format: - name: zip + type: zip # Archive one data set into terse - name: Archive data set into a terse @@ -425,7 +425,7 @@ Examples src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse # Use terse with different options - name: Archive data set into a terse, specify pack algorithm and use adrdssu @@ -433,10 +433,10 @@ Examples src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - terse_pack: "spack" - use_adrdssu: true + type: terse + options: + spack: true + adrdssu: true # Use a pattern to store - name: Archive data set pattern using xmit @@ -445,7 +445,7 @@ Examples exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit + type: xmit - name: Archive multiple GDSs into a terse zos_archive: @@ -455,18 +455,44 @@ Examples - "USER.GDG(-2)" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Archive multiple data sets into a new GDS zos_archive: src: "USER.ARCHIVE.*" dest: "USER.GDG(+1)" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true + + - name: Encode the source data set into Latin-1 before archiving into a terse data set + zos_archive: + src: "USER.ARCHIVE.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + type: terse + encoding: + from: IBM-1047 + to: ISO8859-1 + + - name: Encode and archive multiple data sets but skip encoding for a few. 
+ zos_archive: + src: + - "USER.ARCHIVE1.TEST" + - "USER.ARCHIVE2.TEST" + dest: "USER.ARCHIVE.RESULT.TRS" + format: + type: terse + options: + adrdssu: true + encoding: + from: IBM-1047 + to: ISO8859-1 + skip_encoding: + - "USER.ARCHIVE2.TEST" - name: Encode the source data set into Latin-1 before archiving into a terse data set zos_archive: @@ -503,7 +529,7 @@ Notes .. note:: This module does not perform a send or transmit operation to a remote node. If you want to transport the archive you can use zos_fetch to retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - When packing and using ``use_adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. + When packing and using ``adrdssu`` flag the module will take up to two times the space indicated in ``dest_data_set``. tar, zip, bz2 and pax are archived using python ``tarfile`` library which uses the latest version available for each format, for compatibility when opening from system make sure to use the latest available version for the intended format. @@ -524,6 +550,12 @@ Return Values ------------- +dest + The remote absolute path or data set where the archive was created. + + | **returned**: always + | **type**: str + state The state of the input ``src``. diff --git a/docs/source/modules/zos_backup_restore.rst b/docs/source/modules/zos_backup_restore.rst index 26fc889d37..d99e8a9247 100644 --- a/docs/source/modules/zos_backup_restore.rst +++ b/docs/source/modules/zos_backup_restore.rst @@ -31,6 +31,52 @@ Parameters ---------- +access + Specifies how the module will access data sets and z/OS UNIX files when performing a backup or restore operation. + + | **required**: False + | **type**: dict + + + share + Specifies that the module allow data set read access to other programs while backing up or restoring. 
+ + *share* and ``full_volume`` are mutually exclusive; you cannot use both. + + Option *share* is conditionally supported for *operation=backup* or *operation=restore*. + + When *operation=backup*, and source backup is a VSAM data set, the option is only supported for VSAM data sets which are not defined with VSAM SHAREOPTIONS (1,3) or (1,4). - When *operation=restore*, and restore target is a VSAM data set or PDSE data set, this option is not supported. Both data set types will be accessed exclusively preventing reading or writing to the VSAM, PDSE, or PDSE members. + + The SHAREOPTIONS for VSAM data sets. + + (1) the data set can be shared by multiple programs for read-only processing, or a single program for read and write processing. + + (2) the data set can be accessed by multiple programs for read-only processing, and can also be accessed by a program for write processing. + + (3) the data set can be shared by multiple programs where each program is responsible for maintaining both read and write data integrity. + + (4) the data set can be shared by multiple programs where each program is responsible for maintaining both read and write data integrity differing from (3) in that I/O buffers are updated for each request. + + | **required**: False + | **type**: bool + | **default**: False + + + auth + *auth=true* allows you to act as an administrator, where it will disable checking the current user's privileges for z/OS UNIX files, data sets and catalogs. + + This option is supported for both *operation=backup* and *operation=restore*. + + If you are not authorized to use this option, the module ends with an error message. + + Some authorization checking for data sets is unavoidable, even when *auth* is specified, because some checks are initiated by services and programs invoked by this module which cannot be bypassed. + + | **required**: False + | **type**: bool + | **default**: False + + + operation Used to specify the operation to perform. 
@@ -150,26 +196,99 @@ overwrite | **default**: False -sms_storage_class - When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. +compress + When *operation=backup*, enables compression of partitioned data sets using system-level compression features. If supported, this may utilize zEDC hardware compression. - When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. + This option can reduce the size of the temporary dataset generated during backup operations either before the AMATERSE step when *terse* is True or the resulting backup when *terse* is False. + + | **required**: False + | **type**: bool + | **default**: False - If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + +terse + When *operation=backup*, executes an AMATERSE step to compress and pack the temporary data set for the backup. This creates a backup with a format suitable for transferring off-platform. + + If *operation=backup* and if *dataset=False* then option *terse* must be True. | **required**: False - | **type**: str + | **type**: bool + | **default**: True -sms_management_class - When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. +sms + Specifies how System Managed Storage (SMS) interacts with the storage class and management class when either backup or restore operations are occurring. - When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. + Storage class contains performance and availability attributes related to the storage occupied by the data set. A data set that has a storage class assigned to it is defined as an 'SMS-managed' data set. 
- If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + Management class contains the data set attributes related to the migration and backup of the data set and the expiration date of the data set. A management class can be assigned only to a data set that also has a storage class assigned. | **required**: False - | **type**: str + | **type**: dict + + + storage_class + When *operation=restore*, specifies the storage class to use. The storage class will also be used for temporary data sets created during restore process. + + When *operation=backup*, specifies the storage class to use for temporary data sets created during backup process. + + If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + + | **required**: False + | **type**: str + + + management_class + When *operation=restore*, specifies the management class to use. The management class will also be used for temporary data sets created during restore process. + + When *operation=backup*, specifies the management class to use for temporary data sets created during backup process. + + If neither of *sms_storage_class* or *sms_management_class* are specified, the z/OS system's Automatic Class Selection (ACS) routines will be used. + + | **required**: False + | **type**: str + + + disable_automatic_class + Specifies that the automatic class selection (ACS) routines will not be used to determine the target data set class names for the provided list. + + The list must contain fully or partially qualified data set names. + + To include all selected data sets, "**" in a list. + + You must have READ access to RACF FACILITY class profile `STGADMIN.ADR.RESTORE.BYPASSACS` to use this option. 
+ + | **required**: False + | **type**: list + | **elements**: str + | **default**: [] + + + disable_automatic_storage_class + Specifies that automatic class selection (ACS) routines will not be used to determine the source data set storage class. + + Enabling *disable_automatic_storage_class* ensures ACS is null. + + *storage_class* and *disable_automatic_storage_class* are mutually exclusive; you cannot use both. + + The combination of *disable_automatic_storage_class* and ``disable_automatic_class=[dsn,dsn1,...]`` ensures the selected data sets will not be SMS-managed. + + | **required**: False + | **type**: bool + | **default**: False + + + disable_automatic_management_class + Specifies that automatic class selection (ACS) routines will not be used to determine the source data set management class. + + Enabling *disable_automatic_management_class* ensures ACS is null. + + *management_class* and *disable_automatic_management_class* are mutually exclusive; you cannot use both. + + | **required**: False + | **type**: bool + | **default**: False + + space 
+ + | **required**: False + | **type**: bool + | **default**: False + + Attributes @@ -266,6 +398,15 @@ Examples - user.gdg(0) backup_name: my.backup.dzp + - name: Backup datasets using compress + zos_backup_restore: + operation: backup + compress: true + terse: true + data_sets: + include: someds.name.here + backup_name: my.backup.dzp + - name: Backup all datasets matching the pattern USER.** to UNIX file /tmp/temp_backup.dzp, ignore recoverable errors. zos_backup_restore: operation: backup @@ -357,8 +498,52 @@ Examples operation: restore volume: MYVOL2 backup_name: /tmp/temp_backup.dzp - sms_storage_class: DB2SMS10 - sms_management_class: DB2SMS10 + sms: + storage_class: DB2SMS10 + management_class: DB2SMS10 + + - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. + Disable SMS storage and management classes for all data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: /tmp/temp_backup.dzp + sms: + disable_automatic_class: + - "**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + + - name: Restore data sets from backup stored in the MVS file MY.BACKUP.DZP + Disable SMS storage and management classes for some data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: MY.BACKUP.DZP + sms: + disable_automatic_class: + - "ANSIBLE.TEST.**" + - "**.ONE.**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + + - name: Backup all data sets matching the pattern USER.VSAM.** to z/OS UNIX + file /tmp/temp_backup.dzp and ensure the VSAM alternate indexes are preserved. + zos_backup_restore: + operation: backup + data_sets: + include: user.vsam.** + backup_name: /tmp/temp_backup.dzp + index: true + + - name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp + whether they exist or not and do so as authorized disabling any security checks. 
+ zos_backup_restore: + operation: restore + backup_name: /tmp/temp_backup.dzp + access: + auth: true + share: true diff --git a/docs/source/modules/zos_blockinfile.rst b/docs/source/modules/zos_blockinfile.rst index 041182ca10..a14d39efac 100644 --- a/docs/source/modules/zos_blockinfile.rst +++ b/docs/source/modules/zos_blockinfile.rst @@ -397,6 +397,18 @@ stderr | **type**: str | **sample**: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines + List of strings containing individual lines from stdout. + + | **returned**: failure + | **type**: list + +stderr_lines + List of strings containing individual lines from stderr. + + | **returned**: failure + | **type**: list + rc The return code from ZOAU dmod when json.loads() fails to parse the result from dmod diff --git a/docs/source/modules/zos_copy.rst b/docs/source/modules/zos_copy.rst index 5fe5e565f5..3cf2ad1cd0 100644 --- a/docs/source/modules/zos_copy.rst +++ b/docs/source/modules/zos_copy.rst @@ -37,7 +37,17 @@ asa_text If neither ``src`` or ``dest`` have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - This option is only valid for text files. If ``is_binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + This option is only valid for text files. If ``binary`` is ``true`` or ``executable`` is ``true`` as well, the module will fail. + + | **required**: False + | **type**: bool + | **default**: False + + +identical_gdg_copy + If set to ``true``, and the destination GDG does not exist, the module will copy the source GDG to the destination GDG with identical GDS absolute names. + + If set to ``false``, the copy will be done as a normal copy, without preserving the source GDG absolute names. | **required**: False | **type**: bool @@ -109,7 +119,7 @@ dest If ``dest`` is a nonexistent data set, it will be created following the process outlined here and in the ``volume`` option. 
- If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *is_binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. + If ``dest`` is a nonexistent data set, the attributes assigned will depend on the type of ``src``. If ``src`` is a USS file, ``dest`` will have a Fixed Block (FB) record format and the remaining attributes will be computed. If *binary=true*, ``dest`` will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining attributes will be computed. If *executable=true*,``dest`` will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be computed. If ``src`` is a file and ``dest`` a partitioned data set, ``dest`` does not need to include a member in its value, the module can automatically compute the resulting member name from ``src``. @@ -136,7 +146,7 @@ encoding If ``encoding`` is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - Only valid if ``is_binary`` is false. + Only valid if ``binary`` is false. | **required**: False | **type**: dict @@ -166,7 +176,7 @@ tmp_hlq | **type**: str -force +replace If set to ``true`` and the remote file or data set ``dest`` is empty, the ``dest`` will be reused. 
If set to ``true`` and the remote file or data set ``dest`` is NOT empty, the ``dest`` will be deleted and recreated with the ``src`` data set attributes, otherwise it will be recreated with the ``dest`` data set attributes. @@ -182,12 +192,12 @@ force | **default**: False -force_lock - By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force_lock`` to bypass DISP=SHR and continue with the copy operation. +force + By default, when ``dest`` is a MVS data set and is being used by another process with DISP=SHR or DISP=OLD the module will fail. Use ``force`` to bypass DISP=SHR and continue with the copy operation. If set to ``true`` and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. - Using ``force_lock`` uses operations that are subject to race conditions and can lead to data loss, use with caution. + Using ``force`` uses operations that are subject to race conditions and can lead to data loss, use with caution. If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in the aliases not being preserved in the target dataset. When this scenario occurs the module will fail. @@ -206,12 +216,12 @@ ignore_sftp_stderr | **default**: True -is_binary +binary If set to ``true``, indicates that the file or data set to be copied is a binary file or data set. - When *is_binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. + When *binary=true*, no encoding conversion is applied to the content, all content transferred retains the original state. - Use *is_binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. 
+ Use *binary=true* when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. | **required**: False | **type**: bool @@ -794,7 +804,7 @@ Examples zos_copy: src: /path/to/binary/file dest: HLQ.SAMPLE.PDSE(MEMBER) - is_binary: true + binary: true - name: Copy a sequential data set to a PDS member zos_copy: @@ -820,14 +830,14 @@ Examples src: HLQ.SAMPLE.PDSE dest: HLQ.EXISTING.PDSE remote_src: true - force: true + replace: true - name: Copy PDS member to a new PDS member. Replace if it already exists zos_copy: src: HLQ.SAMPLE.PDSE(SRCMEM) dest: HLQ.NEW.PDSE(DESTMEM) remote_src: true - force: true + replace: true - name: Copy a USS file to a PDSE member. If PDSE does not exist, allocate it zos_copy: @@ -1118,7 +1128,7 @@ state note A note to the user after module terminates. - | **returned**: When ``force=true`` and ``dest`` exists + | **returned**: When ``replace=true`` and ``dest`` exists | **type**: str | **sample**: No data was copied diff --git a/docs/source/modules/zos_data_set.rst b/docs/source/modules/zos_data_set.rst index 1f8b6e9c25..6d76dfaf92 100644 --- a/docs/source/modules/zos_data_set.rst +++ b/docs/source/modules/zos_data_set.rst @@ -311,6 +311,18 @@ scratch | **default**: False +noscratch + When ``state=absent``, specifies whether to keep the data set's entry in the VTOC. + + If ``noscratch=True``, the data set is uncataloged but not physically removed from the volume. The Data Set Control Block is not removed from the VTOC. + + This is the equivalent of using ``NOSCRATCH`` in an ``IDCAMS DELETE`` command. + + | **required**: False + | **type**: bool + | **default**: False + + volumes If cataloging a data set, *volumes* specifies the name of the volume(s) where the data set is located. @@ -598,6 +610,18 @@ batch | **default**: False + noscratch + When ``state=absent``, specifies whether to keep the data set's entry in the VTOC. 
+ + If ``noscratch=True``, the data set is uncataloged but not physically removed from the volume. The Data Set Control Block is not removed from the VTOC. + + This is the equivalent of using ``NOSCRATCH`` in an ``IDCAMS DELETE`` command. + + | **required**: False + | **type**: bool + | **default**: False + + extended Sets the *extended* attribute for Generation Data Groups. @@ -783,6 +807,12 @@ Examples name: someds.name.here state: absent + - name: Uncatalog a data set but do not remove it from the volume. + zos_data_set: + name: someds.name.here + state: absent + noscratch: true + - name: Delete a data set if it exists. If data set not cataloged, check on volume 222222 for the data set, and then catalog and delete if found. zos_data_set: name: someds.name.here diff --git a/docs/source/modules/zos_encode.rst b/docs/source/modules/zos_encode.rst index 4a5e61f798..b570c5ed8c 100644 --- a/docs/source/modules/zos_encode.rst +++ b/docs/source/modules/zos_encode.rst @@ -333,3 +333,24 @@ backup_name | **type**: str | **sample**: /path/file_name.2020-04-23-08-32-29-bak.tar +encoding + Specifies which encodings the destination file or data set was converted from and to. + + | **returned**: always + | **type**: dict + + from + The character set of the source *src*. + + | **returned**: always + | **type**: str + | **sample**: IBM-1047 + + to + The destination *dest* character set for the output that was written as. + + | **returned**: always + | **type**: str + | **sample**: ISO8859-1 + + diff --git a/docs/source/modules/zos_fetch.rst b/docs/source/modules/zos_fetch.rst index 8a341dfcdc..a9b6a01926 100644 --- a/docs/source/modules/zos_fetch.rst +++ b/docs/source/modules/zos_fetch.rst @@ -74,7 +74,7 @@ flat | **default**: false -is_binary +binary Specifies if the file being fetched is a binary. 
| **required**: False @@ -173,7 +173,7 @@ Examples src: SOME.PDS.DATASET dest: /tmp/ flat: true - is_binary: true + binary: true - name: Fetch a UNIX file and don't validate its checksum zos_fetch: @@ -257,9 +257,11 @@ Return Values ------------- -file +src The source file path or data set on the remote machine. + If the source is not found, then src will be empty. + | **returned**: success | **type**: str | **sample**: SOME.DATA.SET @@ -271,7 +273,7 @@ dest | **type**: str | **sample**: /tmp/SOME.DATA.SET -is_binary +binary Indicates the transfer mode that was used to fetch. | **returned**: success @@ -296,17 +298,10 @@ data_set_type | **type**: str | **sample**: PDSE -note - Notice of module failure when ``fail_on_missing`` is false. - - | **returned**: failure and fail_on_missing=false - | **type**: str - | **sample**: The data set USER.PROCLIB does not exist. No data was fetched. - msg - Message returned on failure. + Any important messages from the module. - | **returned**: failure + | **returned**: always | **type**: str | **sample**: The source 'TEST.DATA.SET' does not exist or is uncataloged. diff --git a/docs/source/modules/zos_find.rst b/docs/source/modules/zos_find.rst index 1c3d5222c1..211a4709fb 100644 --- a/docs/source/modules/zos_find.rst +++ b/docs/source/modules/zos_find.rst @@ -68,6 +68,8 @@ excludes If the pattern is a regular expression, it must match the full data set name. + To exclude members, the regular expression or pattern must be enclosed in parentheses. This expression can be used alongside a pattern to exclude data set names. + | **required**: False | **type**: list | **elements**: str @@ -80,8 +82,6 @@ patterns This parameter expects a list, which can be either comma separated or YAML. - If ``pds_patterns`` is provided, ``patterns`` must be member patterns. - When searching for members within a PDS/PDSE, pattern can be a regular expression. 
| **required**: True @@ -102,18 +102,6 @@ size | **type**: str -pds_patterns - List of PDS/PDSE to search. Wildcard is possible. - - Required when searching for data set members. - - Valid only for ``nonvsam`` resource types. Otherwise ignored. - - | **required**: False - | **type**: list - | **elements**: str - - resource_type The types of resources to search. @@ -228,6 +216,22 @@ Examples .. code-block:: yaml+jinja + - name: Exclude all members starting with characters 'TE' in a given list datasets patterns + zos_find: + excludes: '(^te.*)' + patterns: + - IMSTEST.TEST.* + - IMSTEST.USER.* + - USER.*.LIB + + - name: Exclude datasets that includes 'DATA' and members starting with characters 'MEM' in a given list datasets patterns + zos_find: + excludes: '^.*DATA.*(^MEM.*)' + patterns: + - IMSTEST.*.TEST + - IMSTEST.*.* + - USER.*.LIB + - name: Find all data sets with HLQ 'IMS.LIB' or 'IMSTEST.LIB' that contain the word 'hello' zos_find: patterns: diff --git a/docs/source/modules/zos_job_output.rst b/docs/source/modules/zos_job_output.rst index c58610d44e..6b6a1fe07e 100644 --- a/docs/source/modules/zos_job_output.rst +++ b/docs/source/modules/zos_job_output.rst @@ -21,7 +21,8 @@ Synopsis - The job id can be specific such as "STC02560", or one that uses a pattern such as "STC*" or "*". - The job name can be specific such as "TCPIP", or one that uses a pattern such as "TCP*" or "*". - The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". -- If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. +- If there is no dd_name, or if dd_name="?", output of all the dds under the given job will be displayed. +- If SYSIN DDs are needed, *sysin_dd* should be set to ``true``. @@ -52,13 +53,21 @@ owner | **type**: str -ddname +dd_name Data definition name (show only this DD on a found job). 
(e.g "JESJCL", "?") | **required**: False | **type**: str +sysin_dd + Whether to include SYSIN DDs as part of the output. + + | **required**: False + | **type**: bool + | **default**: False + + Attributes @@ -81,21 +90,26 @@ Examples .. code-block:: yaml+jinja - - name: Job output with ddname + - name: Job output with dd_name zos_job_output: job_id: "STC02560" - ddname: "JESMSGLG" + dd_name: "JESMSGLG" - - name: JES Job output without ddname + - name: JES Job output without dd_name zos_job_output: job_id: "STC02560" - - name: JES Job output with all ddnames + - name: JES Job output with all dd_name zos_job_output: job_id: "STC*" job_name: "*" owner: "IBMUSER" - ddname: "?" + dd_name: "?" + + - name: Query a job's output including SYSIN DDs + zos_job_output: + job_id: "JOB00548" + sysin_dd: true @@ -147,7 +161,7 @@ jobs "- 6 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "17", @@ -171,7 +185,7 @@ jobs " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "14", @@ -200,7 +214,7 @@ jobs " IEF033I JOB/HELLO /STOP 2020049.1025 ", " CPU: 0 HR 00 MIN 00.00 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "19", @@ -214,7 +228,7 @@ jobs " ", " PROCESSING ENDED AT EOD " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "", "record_count": "4", @@ -225,7 +239,7 @@ jobs "content": [ " HELLO, WORLD " ], - "ddname": "SYSUT2", + "dd_name": "SYSUT2", "id": "103", "procstep": "", "record_count": "1", @@ -247,13 +261,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" }, "subsystem": "STL1", "system": "STL1" @@ -331,13 +339,13 @@ jobs | **type**: str | **sample**: 00:00:10 - ddnames + dds Data 
definition names. | **type**: list | **elements**: dict - ddname + dd_name Data definition name. | **type**: str @@ -374,7 +382,7 @@ jobs | **sample**: 574 content - The ddname content. + The dd content. | **type**: list | **elements**: str @@ -448,13 +456,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" } } @@ -481,29 +483,39 @@ jobs | **type**: int - steps - Series of JCL steps that were executed and their return codes. - | **type**: list - | **elements**: dict + steps + Series of JCL steps that were executed and their return codes. - step_name - Name of the step shown as "was executed" in the DD section. + | **type**: list + | **elements**: dict + | **sample**: + + .. code-block:: json - | **type**: str - | **sample**: STEP0001 + [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ] + + step_name + Name of the step shown as "was executed" in the DD section. - step_cc - The CC returned for this step in the DD section. + | **type**: str + | **sample**: STEP0001 - | **type**: int + step_cc + The CC returned for this step in the DD section. + | **type**: int changed Indicates if any changes were made during module operation - | **returned**: on success + | **returned**: always | **type**: bool diff --git a/docs/source/modules/zos_job_query.rst b/docs/source/modules/zos_job_query.rst index 38cea61e34..0f9091635a 100644 --- a/docs/source/modules/zos_job_query.rst +++ b/docs/source/modules/zos_job_query.rst @@ -138,6 +138,11 @@ changed | **returned**: always | **type**: bool + | **sample**: + + .. code-block:: json + + true jobs The output information for a list of jobs matching specified criteria. If no job status is found, this will return ret_code dictionary with parameter msg_txt = The job could not be found. 
@@ -158,12 +163,13 @@ jobs "creation_time": "12:13:00", "execution_node": "STL1", "execution_time": "00:00:02", - "job_class": "K", + "job_class": "STC", "job_id": "JOB01427", "job_name": "LINKJOB", "origin_node": "STL1", "owner": "ADMIN", "priority": 1, + "program_name": "BPXBATCH", "queue_position": 3, "ret_code": "null", "subsystem": "STL1", @@ -184,10 +190,13 @@ jobs "origin_node": "STL1", "owner": "ADMIN", "priority": 0, + "program_name": "null", "queue_position": 0, "ret_code": { "code": "null", - "msg": "CANCELED" + "msg": "CANCELED", + "msg_code": "null", + "msg_txt": "CANCELED" }, "subsystem": "STL1", "svc_class": "E", @@ -274,13 +283,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" } } @@ -326,6 +329,36 @@ jobs + steps + Series of JCL steps that were executed and their return codes. + + | **type**: list + | **elements**: dict + | **sample**: + + .. code-block:: json + + { + "steps": [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ] + } + + step_name + Name of the step shown as "was executed" in the DD section. + + | **type**: str + | **sample**: STEP0001 + + step_cc + The CC returned for this step in the DD section. + + | **type**: int + + job_class Job class for this job. @@ -380,7 +413,7 @@ jobs | **sample**: 00:00:10 -message +msg Message returned on failure. | **returned**: failure diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 6d31b6abd0..9acd856296 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -43,27 +43,24 @@ src | **type**: str -location - The JCL location. Supported choices are ``data_set``, ``uss`` or ``local``. +remote_src + If set to ``false``, the module searches for ``src`` in the controller node. - ``data_set`` can be a PDS, PDSE, sequential data set, or a generation data set. 
- - ``uss`` means the JCL location is located in UNIX System Services (USS). - - ``local`` means locally to the Ansible control node. + If set to ``true``, the module searches for the file ``src`` in the managed node. | **required**: False - | **type**: str - | **default**: data_set - | **choices**: data_set, uss, local + | **type**: bool + | **default**: True -wait_time_s - Option *wait_time_s* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. +wait_time + Option *wait_time* is the total time that module `zos_job_submit <./zos_job_submit.html>`_ will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - *wait_time_s* is measured in seconds and must be a value greater than 0 and less than 86400. + *wait_time* is measured in seconds and must be a value greater than 0 and less than 86400. - The module can submit and forget jobs by setting *wait_time_s* to 0. This way the module will not try to retrieve the job details other than job id. Job details and contents can be retrieved later by using `zos_job_query <./zos_job_query.html>`_ or `zos_job_output <./zos_job_output.html>`_ if needed. + The module can submit and forget jobs by setting *wait_time* to 0. This way the module will not try to retrieve the job details other than job id. Job details and contents can be retrieved later by using `zos_job_query <./zos_job_query.html>`_ or `zos_job_output <./zos_job_output.html>`_ if needed. + + If *remote_src=False* and *wait_time=0*, the module will not clean the copy of the file on the remote system, to avoid problems with job submission. | **required**: False | **type**: int @@ -80,7 +77,7 @@ max_rc return_output Whether to print the DD output. - If false, an empty list will be returned in the ddnames field. + If false, an empty list will be returned in the dds field. 
| **required**: False | **type**: bool @@ -92,7 +89,7 @@ volume When configured, the `zos_job_submit <./zos_job_submit.html>`_ will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - Ignored for *location=uss* and *location=local*. + Ignored for *remote_src=False*. | **required**: False | **type**: str @@ -101,7 +98,7 @@ volume encoding Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - This option is only supported for when *location=local*. + This option is only supported for when *remote_src=False*. If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system encoding. @@ -295,19 +292,19 @@ Examples - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: uss + remote_src: true return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: local + remote_src: false encoding: from: ISO8859-1 to: IBM-037 @@ -315,36 +312,36 @@ Examples - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. 
zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: data_set + remote_src: true max_rc: 16 - name: Submit JCL from the latest generation data set in a generation data group. zos_job_submit: src: HLQ.DATA.GDG(0) - location: data_set + remote_src: true - name: Submit JCL from a previous generation data set in a generation data group. zos_job_submit: src: HLQ.DATA.GDG(-2) - location: data_set + remote_src: true @@ -380,12 +377,11 @@ jobs [ { "asid": 0, - "class": "K", "content_type": "JOB", "cpu_time": 1, "creation_date": "2023-05-03", "creation_time": "12:13:00", - "ddnames": [ + "dds": [ { "byte_count": "677", "content": [ @@ -406,7 +402,7 @@ jobs "- 12 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "16", @@ -463,7 +459,7 @@ jobs " 15 ++SYSPRINT DD SYSOUT=* ", " ++* " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "47", @@ -517,7 +513,7 @@ jobs " IEF033I JOB/DBDGEN00/STOP 2020073.1250 ", " CPU: 0 HR 00 MIN 00.03 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "44", @@ -572,7 +568,7 @@ jobs " **** END OF MESSAGE SUMMARY REPORT **** ", " " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "L", "record_count": "45", @@ -593,14 +589,14 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "DLORD6" - } - ] + "msg_txt": "" }, + "steps": [ + { + "step_cc": 0, + "step_name": "DLORD6" + } + ], "subsystem": "STL1", "svc_class": "?", "system": "STL1" @@ -648,13 +644,13 @@ jobs | **type**: str | **sample**: 00:00:10 - ddnames + dds Data definition names. 
| **type**: list | **elements**: dict - ddname + dd_name Data definition name. | **type**: str @@ -691,7 +687,7 @@ jobs | **sample**: 574 content - The ddname content. + The dd content. | **type**: list | **elements**: str @@ -730,13 +726,7 @@ jobs "code": 0, "msg": "CC 0000", "msg_code": "0000", - "msg_txt": "", - "steps": [ - { - "step_cc": 0, - "step_name": "STEP0001" - } - ] + "msg_txt": "" } } @@ -796,23 +786,35 @@ jobs | **type**: int - steps - Series of JCL steps that were executed and their return codes. - | **type**: list - | **elements**: dict + steps + Series of JCL steps that were executed and their return codes. - step_name - Name of the step shown as "was executed" in the DD section. + | **type**: list + | **elements**: dict + | **sample**: + + .. code-block:: json - | **type**: str - | **sample**: STEP0001 + { + "steps": [ + { + "step_cc": 0, + "step_name": "STEP0001" + } + ] + } - step_cc - The CC returned for this step in the DD section. + step_name + Name of the step shown as "was executed" in the DD section. - | **type**: int + | **type**: str + | **sample**: STEP0001 + step_cc + The CC returned for this step in the DD section. + + | **type**: int job_class diff --git a/docs/source/modules/zos_lineinfile.rst b/docs/source/modules/zos_lineinfile.rst index 3ed1a1e339..60fa24005a 100644 --- a/docs/source/modules/zos_lineinfile.rst +++ b/docs/source/modules/zos_lineinfile.rst @@ -335,13 +335,31 @@ msg | **type**: str | **sample**: Parameter verification failed -return_content +stdout + The stdout from ZOAU dsed command. + + | **returned**: always + | **type**: str + +stderr The error messages from ZOAU dsed - | **returned**: failure + | **returned**: always | **type**: str | **sample**: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines + List of strings containing individual lines from stdout. + + | **returned**: always + | **type**: list + +stderr_lines + List of strings containing individual lines from stderr. 
+ + | **returned**: always + | **type**: list + backup_name Name of the backup file or data set that was created. diff --git a/docs/source/modules/zos_mount.rst b/docs/source/modules/zos_mount.rst index 703795c3da..bd0fd6e7cf 100644 --- a/docs/source/modules/zos_mount.rst +++ b/docs/source/modules/zos_mount.rst @@ -63,25 +63,25 @@ fs_type state The desired status of the described mount (choice). - If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. + If *state=mounted* and *src* are not in use, the module will add the file system entry to the parmlib member *persistent/name* if not present. The *path* will be updated, the device will be mounted and the module will complete successfully with *changed=True*. - If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/data_store* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. + If *state=mounted* and *src* are in use, the module will add the file system entry to the parmlib member *persistent/name* if not present. The *path* will not be updated, the device will not be mounted and the module will complete successfully with *changed=False*. - If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/data_store*. The device will be unmounted and the module will complete successfully with *changed=True*. + If *state=unmounted* and *src* are in use, the module will **not** add the file system entry to the parmlib member *persistent/name*. The device will be unmounted and the module will complete successfully with *changed=True*. 
- If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/data_store*.The device will remain unchanged and the module will complete with *changed=False*. + If *state=unmounted* and *src* are not in use, the module will **not** add the file system entry to parmlib member *persistent/name*.The device will remain unchanged and the module will complete with *changed=False*. - If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/data_store* if not present. The module will complete successfully with *changed=True*. + If *state=present*, the module will add the file system entry to the provided parmlib member *persistent/name* if not present. The module will complete successfully with *changed=True*. - If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/data_store* if present. The module will complete successfully with *changed=True*. + If *state=absent*, the module will remove the file system entry to the provided parmlib member *persistent/name* if present. The module will complete successfully with *changed=True*. - If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/data_store*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. + If *state=remounted*, the module will **not** add the file system entry to parmlib member *persistent/name*. The device will be unmounted and mounted, the module will complete successfully with *changed=True*. | **required**: False @@ -91,13 +91,13 @@ state persistent - Add or remove mount command entries to provided *data_store* + Add or remove mount command entries to provided *name* | **required**: False | **type**: dict - data_store + name The data set name used for persisting a mount command. This is usually BPXPRMxx or a copy. 
| **required**: True @@ -105,7 +105,7 @@ persistent backup - Creates a backup file or backup data set for *data_store*, including the timestamp information to ensure that you retrieve the original parameters defined in *data_store*. + Creates a backup file or backup data set for *name*, including the timestamp information to ensure that you retrieve the original parameters defined in *name*. *backup_name* can be used to specify a backup file name if *backup=true*. @@ -119,7 +119,7 @@ persistent backup_name Specify the USS file name or data set name for the destination backup. - If the source *data_store* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. + If the source *name* is a USS file or path, the *backup_name* name can be relative or absolute for file or path name. If the source is an MVS data set, the backup_name must be an MVS data set name. @@ -131,10 +131,10 @@ persistent | **type**: str - comment - If provided, this is used as a comment that surrounds the command in the *persistent/data_store* + marker + If provided, this is used as a marker that surrounds the command in the *persistent/name* - Comments are used to encapsulate the *persistent/data_store* entry such that they can easily be understood and located. + Comments are used to encapsulate the *persistent/name* entry such that they can easily be understood and located. | **required**: False | **type**: list @@ -334,8 +334,8 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + name: SYS1.PARMLIB(BPXPRMAA) + marker: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. 
zos_mount: @@ -344,10 +344,10 @@ Examples fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) + name: SYS1.PARMLIB(BPXPRMAA) backup: true backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + marker: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -454,7 +454,7 @@ persistent | **returned**: always | **type**: dict - data_store + name The persistent store name where the mount was written to. | **returned**: always @@ -479,8 +479,8 @@ persistent | **type**: str | **sample**: SYS1.FILESYS(PRMAABAK) - comment - The text that was used in markers around the *Persistent/data_store* entry. + marker + The text that was used in markers around the *Persistent/name* entry. | **returned**: always | **type**: list @@ -494,6 +494,15 @@ persistent ] ] + state + The state of the persistent entry in the persistent data set. + + Possible values are ``added`` and ``removed``. + + | **returned**: always + | **type**: str + | **sample**: added + unmount_opts Describes how the unmount is to be performed. diff --git a/docs/source/modules/zos_operator.rst b/docs/source/modules/zos_operator.rst index 8710256f74..de7bf58ce8 100644 --- a/docs/source/modules/zos_operator.rst +++ b/docs/source/modules/zos_operator.rst @@ -51,20 +51,29 @@ verbose | **default**: False -wait_time_s +wait_time Set maximum time in seconds to wait for the commands to execute. When set to 0, the system default is used. This option is helpful on a busy system requiring more time to execute commands. - Setting *wait* can instruct if execution should wait the full *wait_time_s*. + Setting *wait* can instruct if execution should wait the full *wait_time*. | **required**: False | **type**: int | **default**: 1 +time_unit + Set the ``wait_time`` unit of time, which can be ``s`` (seconds) or ``cs`` (centiseconds). 
+ + | **required**: False + | **type**: str + | **default**: s + | **choices**: s, cs + + case_sensitive If ``true``, the command will not be converted to uppercase before execution. Instead, the casing will be preserved just as it was written in a task. @@ -111,12 +120,18 @@ Examples - name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' - wait_time_s: 5 + wait_time: 5 - name: Display the system symbols and associated substitution texts. zos_operator: cmd: 'D SYMBOLS' + - name: Execute an operator command to show device status and allocation wait 10 centiseconds. + zos_operator: + cmd: 'd u' + wait_time : 10 + time_unit : 'cs' + @@ -150,7 +165,7 @@ cmd | **sample**: d u,all elapsed - The number of seconds that elapsed waiting for the command to complete. + The number of seconds or centiseconds that elapsed waiting for the command to complete. | **returned**: always | **type**: float @@ -160,13 +175,20 @@ elapsed 51.53 -wait_time_s - The maximum time in seconds to wait for the commands to execute. +wait_time + The maximum time in the time_unit set to wait for the commands to execute. | **returned**: always | **type**: int | **sample**: 5 +time_unit + The time unit set for wait_time. + + | **returned**: always + | **type**: str + | **sample**: s + content The resulting text from the command submitted. diff --git a/docs/source/modules/zos_operator_action_query.rst b/docs/source/modules/zos_operator_action_query.rst index 350f87266b..b69be4d463 100644 --- a/docs/source/modules/zos_operator_action_query.rst +++ b/docs/source/modules/zos_operator_action_query.rst @@ -37,7 +37,7 @@ system | **type**: str -message_id +msg_id Return outstanding messages requiring operator action awaiting a reply for a particular message identifier. If the message identifier is not specified, all outstanding messages for all message identifiers are returned. 
@@ -59,7 +59,7 @@ job_name | **type**: str -message_filter +msg_filter Return outstanding messages requiring operator action awaiting a reply that match a regular expression (regex) filter. If the message filter is not specified, all outstanding messages are returned regardless of their content. @@ -69,7 +69,7 @@ message_filter filter - Specifies the substring or regex to match to the outstanding messages, see *use_regex*. + Specifies the substring or regex to match to the outstanding messages, see *literal*. All special characters in a filter string that are not a regex are escaped. @@ -81,16 +81,16 @@ message_filter | **type**: str - use_regex + literal Indicates that the value for *filter* is a regex or a string to match. - If False, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. + If False, the module creates a regex from the *filter* string and matches it to the outstanding messages. - If True, the module creates a regex from the *filter* string and matches it to the outstanding messages. + If True, the module assumes that *filter* is not a regex and matches the *filter* substring on the outstanding messages. | **required**: False | **type**: bool - | **default**: False + | **default**: True @@ -126,11 +126,11 @@ Examples - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: - message_id: dsi* + msg_id: dsi* - name: Display all outstanding messages that have the text IMS READY in them zos_operator_action_query: - message_filter: + msg_filter: filter: IMS READY - name: Display all outstanding messages where the job name begins with 'mq', @@ -138,11 +138,11 @@ Examples pattern 'IMS' zos_operator_action_query: job_name: mq* - message_id: dsi* + msg_id: dsi* system: mv29 - message_filter: + msg_filter: filter: ^.*IMS.*$ - use_regex: true + literal: true @@ -172,14 +172,14 @@ changed count The total number of outstanding messages. 
- | **returned**: on success + | **returned**: always | **type**: int | **sample**: 12 actions The list of the outstanding messages. - | **returned**: success + | **returned**: always | **type**: list | **elements**: dict | **sample**: @@ -190,8 +190,8 @@ actions { "job_id": "STC01537", "job_name": "IM5HCONN", - "message_id": "HWSC0000I", - "message_text": "*399 HWSC0000I *IMS CONNECT READY* IM5HCONN", + "msg_id": "HWSC0000I", + "msg_txt": "*399 HWSC0000I *IMS CONNECT READY* IM5HCONN", "number": "001", "system": "MV27", "type": "R" @@ -199,8 +199,8 @@ actions { "job_id": "STC01533", "job_name": "IM5HCTRL", - "message_id": "DFS3139I", - "message_text": "*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H", + "msg_id": "DFS3139I", + "msg_txt": "*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H", "number": "002", "system": "MV27", "type": "R" @@ -235,8 +235,8 @@ actions | **type**: str | **sample**: STC01537 - message_text - Content of the outstanding message requiring operator action awaiting a reply. If *message_filter* is set, *message_text* will be filtered accordingly. + msg_txt + Content of the outstanding message requiring operator action awaiting a reply. If *msg_filter* is set, *msg_txt* will be filtered accordingly. | **returned**: success | **type**: str @@ -249,7 +249,7 @@ actions | **type**: str | **sample**: IM5HCONN - message_id + msg_id Message identifier for outstanding message requiring operator action awaiting a reply. | **returned**: success diff --git a/docs/source/modules/zos_started_task.rst b/docs/source/modules/zos_started_task.rst new file mode 100644 index 0000000000..bc643911f1 --- /dev/null +++ b/docs/source/modules/zos_started_task.rst @@ -0,0 +1,466 @@ + +:github_url: https://github.com/ansible-collections/ibm_zos_core/blob/dev/plugins/modules/zos_started_task.py + +.. _zos_started_task_module: + + +zos_started_task -- Perform operations on started tasks. 
+======================================================== + + + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- +- start, display, modify, cancel, force and stop a started task + + + + + +Parameters +---------- + + +arm + *arm* indicates to execute normal task termination routines without causing address space destruction. + + Only applicable when *state* is ``forced``, otherwise ignored. + + | **required**: False + | **type**: bool + + +armrestart + Indicates that the batch job or started task should be automatically restarted after CANCEL or FORCE completes, if it is registered as an element of the automatic restart manager. If the job or task is not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. + + Only applicable when *state* is ``cancelled`` or ``forced``, otherwise ignored. + + | **required**: False + | **type**: bool + + +asidx + When *state* is ``cancelled``, ``stopped`` or ``forced``, *asidx* is the hexadecimal address space identifier of the work unit you want to cancel, stop or force. + + Only applicable when *state* is ``stopped``, ``cancelled``, or ``forced``, otherwise ignored. + + | **required**: False + | **type**: str + + +dump + Whether to perform a dump. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) depends on the JCL for the job. + + Only applicable when *state* is ``cancelled``, otherwise ignored. + + | **required**: False + | **type**: bool + + +identifier_name + Option *identifier_name* is the name that identifies the task. This name can be up to 8 characters long. The first character must be alphabetical. + + | **required**: False + | **type**: str + + +job_account + Specifies accounting data in the JCL JOB statement for the started task. If the source JCL already had accounting data, the value that is specified on this parameter overrides it. + + Only applicable when *state* is ``started``, otherwise ignored. 
+ + | **required**: False + | **type**: str + + +job_name + When *state* is started, this is the name which should be assigned to a started task while starting it. If *job_name* is not specified, then *member_name* is used as job's name. + + When *state* is ``displayed``, ``modified``, ``cancelled``, ``stopped``, or ``forced``, *job_name* is the started task name. + + | **required**: False + | **type**: str + + +keyword_parameters + Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged procedure. The maximum length of each keyword=option pair is 66 characters. No individual value within this field can be longer than 44 characters in length. + + Only applicable when *state* is ``started``, otherwise ignored. + + | **required**: False + | **type**: dict + + +member_name + Name of a member of a partitioned data set that contains the source JCL for the task to be started. The member can be either a job or a cataloged procedure. + + Only applicable when *state* is ``started``, otherwise ignored. + + | **required**: False + | **type**: str + + +parameters + Program parameters passed to the started program. + + Only applicable when *state* is ``started`` or ``modified``, otherwise ignored. + + | **required**: False + | **type**: list + | **elements**: str + + +reus_asid + When *reus_asid* is ``True`` and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned to the address space created by the START command. If *reus_asid* is not specified or REUSASID(NO) is specified in DIAGxx, an ordinary ASID is assigned. + + Only applicable when *state* is ``started``, otherwise ignored. + + | **required**: False + | **type**: bool + + +state + *state* should be the desired state of the started task after the module is executed. 
+ + If *state* is ``started`` and the respective member is not present on the managed node, then error will be thrown with ``rc=1``, ``changed=false`` and *stderr* which contains error details. + + If *state* is ``cancelled``, ``modified``, ``displayed``, ``stopped`` or ``forced`` and the started task is not running on the managed node, then error will be thrown with ``rc=1``, ``changed=false`` and *stderr* contains error details. + + If *state* is ``displayed`` and the started task is running, then the module will return the started task details along with ``changed=true``. + + | **required**: True + | **type**: str + | **choices**: started, displayed, modified, cancelled, stopped, forced + + +subsystem + The name of the subsystem that selects the task for processing. The name must be 1-4 characters long, which are defined in the IEFSSNxx parmlib member, and the subsystem must be active. + + Only applicable when *state* is ``started``, otherwise ignored. + + | **required**: False + | **type**: str + + +task_id + The started task id starts with STC. + + Only applicable when *state* is ``displayed``, ``modified``, ``cancelled``, ``stopped``, or ``forced``, otherwise ignored. + + | **required**: False + | **type**: str + + +userid + The user ID of the time-sharing user you want to cancel or force. + + Only applicable when *state* is ``cancelled`` or ``forced``, otherwise ignored. + + | **required**: False + | **type**: str + + +verbose + When ``verbose=true``, the module will return system logs that describe the task's execution. This option can return a big response depending on system load, also it could surface other program's activity. + + | **required**: False + | **type**: bool + | **default**: False + + +wait_time + Total time that the module will wait for a submitted task, measured in seconds. The time begins when the module is executed on the managed node. Default value of 0 means to wait the default amount of time supported by the opercmd utility. 
+ | **required**: False + | **type**: int + | **default**: 0 + + + + +Attributes +---------- +action + | **support**: none + | **description**: Indicates this has a corresponding action plugin so some parts of the options can be executed on the controller. +async + | **support**: full + | **description**: Supports being used with the ``async`` keyword. +check_mode + | **support**: full + | **description**: Can run in check_mode and return changed status prediction without modifying target. If not supported, the action will be skipped. + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + - name: Start a started task using a member in a partitioned data set. + zos_started_task: + state: "started" + member_name: "PROCAPP" + - name: Start a started task using a member name and giving it an identifier. + zos_started_task: + state: "started" + member_name: "PROCAPP" + identifier_name: "SAMPLE" + - name: Start a started task using both a member and a job name. + zos_started_task: + state: "started" + member_name: "PROCAPP" + job_name: "SAMPLE" + - name: Start a started task and enable verbose output. + zos_started_task: + state: "started" + member_name: "PROCAPP" + job_name: "SAMPLE" + verbose: True + - name: Start a started task specifying the subsystem and enabling a reusable ASID. + zos_started_task: + state: "started" + member_name: "PROCAPP" + subsystem: "MSTR" + reus_asid: true + - name: Display a started task using a started task name. + zos_started_task: + state: "displayed" + job_name: "PROCAPP" + - name: Display a started task using a started task id. + zos_started_task: + state: "displayed" + task_id: "STC00012" + - name: Display all started tasks that begin with an s using a wildcard. + zos_started_task: + state: "displayed" + job_name: "s*" + - name: Display all started tasks. + zos_started_task: + state: "displayed" + job_name: "all" + - name: Cancel a started task using task name. 
+ zos_started_task: + state: "cancelled" + job_name: "SAMPLE" + - name: Cancel a started task using a started task id. + zos_started_task: + state: "cancelled" + task_id: "STC00093" + - name: Cancel a started task using its task name and ASID. + zos_started_task: + state: "cancelled" + job_name: "SAMPLE" + asidx: 0014 + - name: Modify a started task's parameters. + zos_started_task: + state: "modified" + job_name: "SAMPLE" + parameters: ["XX=12"] + - name: Modify a started task's parameters using a started task id. + zos_started_task: + state: "modified" + task_id: "STC00034" + parameters: ["XX=12"] + - name: Stop a started task using its task name. + zos_started_task: + state: "stopped" + job_name: "SAMPLE" + - name: Stop a started task using a started task id. + zos_started_task: + state: "stopped" + task_id: "STC00087" + - name: Stop a started task using its task name, identifier and ASID. + zos_started_task: + state: "stopped" + job_name: "SAMPLE" + identifier_name: "SAMPLE" + asidx: 00A5 + - name: Force a started task using its task name. + zos_started_task: + state: "forced" + job_name: "SAMPLE" + - name: Force a started task using its task id. + zos_started_task: + state: "forced" + task_id: "STC00065" + + + + + + + + + + +Return Values +------------- + + +changed + True if the state was changed, otherwise False. + + | **returned**: always + | **type**: bool + +cmd + Command executed via opercmd. + + | **returned**: changed + | **type**: str + | **sample**: S SAMPLE + +msg + Failure or skip message returned by the module. + + | **returned**: failure or skipped + | **type**: str + | **sample**: Command parameters are invalid. + +rc + The return code is 0 when the command executed successfully. + + The return code is 1 when opercmd throws any error. + + The return code is 4 when task_id format is invalid. + + The return code is 5 when any parameter validation failed. + + The return code is 8 when started task is not found using task_id. 
+ + | **returned**: changed + | **type**: int + +state + The final state of the started task, after execution. + + | **returned**: success + | **type**: str + | **sample**: S SAMPLE + +stderr + The STDERR from the command, may be empty. + + | **returned**: failure + | **type**: str + | **sample**: An error has occurred. + +stderr_lines + List of strings containing individual lines from STDERR. + + | **returned**: failure + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "An error has occurred" + ] + +stdout + The STDOUT from the command, may be empty. + + | **returned**: success + | **type**: str + | **sample**: ISF031I CONSOLE OMVS0000 ACTIVATED. + +stdout_lines + List of strings containing individual lines from STDOUT. + + | **returned**: success + | **type**: list + | **sample**: + + .. code-block:: json + + [ + "Allocation to SYSEXEC completed." + ] + +tasks + The output information for a list of started tasks matching specified criteria. + + If no started task is found then this will return empty. + + | **returned**: success + | **type**: list + | **elements**: dict + + asidx + Address space identifier (ASID), in hexadecimal. + + | **type**: str + | **sample**: 44 + + cpu_time + The processor time used by the address space, including the initiator. This time does not include SRB time. + + cpu_time has one of the following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working + + | **type**: str + | **sample**: 000.008S + + elapsed_time + For address spaces other than system address spaces, the elapsed time since job select time. + + For system address spaces created before master scheduler initialization, the elapsed time since master scheduler initialization. 
+ + For system address spaces created after master scheduler initialization, the elapsed time since system address space creation. + + elapsed_time has one of the following formats, where ttt is milliseconds, sss or ss is seconds, mm is minutes, and hh or hhhhh is hours. sss.tttS when time is less than 1000 seconds hh.mm.ss when time is at least 1000 seconds, but less than 100 hours hhhhh.mm when time is at least 100 hours ******** when time exceeds 100000 hours NOTAVAIL when the TOD clock is not working + + | **type**: str + | **sample**: 812.983S + + started_time + The time when the started task started. + + | **type**: str + | **sample**: 2025-09-11 18:21:50.293644+00:00 + + task_id + The started task id. + + | **type**: str + | **sample**: STC00018 + + task_identifier + The name of a system address space. + + The name of a step, for a job or attached APPC transaction program attached by an initiator. + + The identifier of a task created by the START command. + + The name of a step that called a cataloged procedure. + + ``STARTING`` if initiation of a started job, system task, or attached APPC transaction program is incomplete. + + ``*MASTER*`` for the master address space. + + The name of an initiator address space. + + | **type**: str + | **sample**: SPROC + + task_name + The name of the started task. + + | **type**: str + | **sample**: SAMPLE + + +verbose_output + If ``verbose=true``, the system logs related to the started task executed state will be shown. + + | **returned**: success + | **type**: str + | **sample**: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... + diff --git a/docs/source/modules/zos_unarchive.rst b/docs/source/modules/zos_unarchive.rst index eec87c3eca..c1f35bf46a 100644 --- a/docs/source/modules/zos_unarchive.rst +++ b/docs/source/modules/zos_unarchive.rst @@ -60,7 +60,7 @@ format | **choices**: bz2, gz, tar, zip, terse, xmit, pax - format_options + options Options specific to a compression format. 
| **required**: False @@ -89,7 +89,7 @@ format dest_volumes - When *use_adrdssu=True*, specify the volume the data sets will be written to. + When *adrdssu=True*, specify the volume the data sets will be written to. If no volume is specified, storage management rules will be used to determine the volume where the file will be unarchived. @@ -424,14 +424,14 @@ Examples zos_unarchive: src: "./files/archive_folder_test.tar" format: - name: tar + type: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: src: "/tmp/test.bz2" format: - name: bz2 + type: bz2 include: - 'foo.txt' @@ -440,7 +440,7 @@ Examples zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse exclude: - USER.ARCHIVE.TEST1 - USER.ARCHIVE.TEST2 @@ -450,16 +450,16 @@ Examples zos_unarchive: src: "USER.ARCHIVE(0)" format: - name: terse + type: terse # List option - name: List content from XMIT zos_unarchive: src: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit - format_options: - use_adrdssu: true + type: xmit + options: + adrdssu: true list: true # Encoding example diff --git a/galaxy.yml b/galaxy.yml index 892cc531ea..5efbdb52ec 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -66,7 +66,7 @@ issues: https://github.com/ansible-collections/ibm_zos_core/issues # Ignore files and directories matching the following patterns build_ignore: - - '*.tar.gz' + - "*.tar.gz" - __pycache__ - .cache - .DS_Store @@ -99,5 +99,5 @@ build_ignore: - tests/sanity/ignore-2.14.txt - venv* - ansible_collections - - '*.log' - - '*.sh' + - "*.log" + - "*.sh" diff --git a/meta/ibm_zos_core_meta.yml b/meta/ibm_zos_core_meta.yml index 5256a8b9a0..e8dc7119b2 100644 --- a/meta/ibm_zos_core_meta.yml +++ b/meta/ibm_zos_core_meta.yml @@ -1,10 +1,8 @@ name: ibm_zos_core version: "1.15.0" managed_requirements: - - - name: "IBM Open Enterprise SDK for Python" - version: ">=3.12" - - - name: "Z Open Automation Utilities" - version: - - ">=1.3.5" + - name: "IBM Open Enterprise 
SDK for Python" + version: ">=3.12" + - name: "Z Open Automation Utilities" + version: + - ">=1.3.5" diff --git a/plugins/action/zos_copy.py b/plugins/action/zos_copy.py index 1c19b31de6..7745403b9c 100644 --- a/plugins/action/zos_copy.py +++ b/plugins/action/zos_copy.py @@ -51,12 +51,12 @@ def run(self, tmp=None, task_vars=None): dest = task_args.get('dest', None) content = task_args.get('content', None) - force = _process_boolean(task_args.get('force'), default=True) + replace = _process_boolean(task_args.get('replace'), default=True) backup = _process_boolean(task_args.get('backup'), default=False) local_follow = _process_boolean(task_args.get('local_follow'), default=False) remote_src = _process_boolean(task_args.get('remote_src'), default=False) - is_binary = _process_boolean(task_args.get('is_binary'), default=False) - force_lock = _process_boolean(task_args.get('force_lock'), default=False) + binary = _process_boolean(task_args.get('binary'), default=False) + force = _process_boolean(task_args.get('force'), default=False) executable = _process_boolean(task_args.get('executable'), default=False) asa_text = _process_boolean(task_args.get('asa_text'), default=False) ignore_sftp_stderr = _process_boolean(task_args.get("ignore_sftp_stderr"), default=True) @@ -104,7 +104,7 @@ def run(self, tmp=None, task_vars=None): msg = "'src' or 'content' is required" return self._exit_action(result, msg, failed=True) - if encoding and is_binary: + if encoding and binary: msg = "The 'encoding' parameter is not valid for binary transfer" return self._exit_action(result, msg, failed=True) @@ -112,8 +112,8 @@ def run(self, tmp=None, task_vars=None): msg = "Backup file provided but 'backup' parameter is False" return self._exit_action(result, msg, failed=True) - if is_binary and asa_text: - msg = "Both 'is_binary' and 'asa_text' are True. Unable to copy binary data as an ASA text file." + if binary and asa_text: + msg = "Both 'binary' and 'asa_text' are True. 
Unable to copy binary data as an ASA text file." return self._exit_action(result, msg, failed=True) if executable and asa_text: @@ -130,9 +130,9 @@ def run(self, tmp=None, task_vars=None): msg = "Cannot specify 'mode', 'owner' or 'group' for MVS destination" return self._exit_action(result, msg, failed=True) - if force_lock: + if force: display.warning( - msg="Using force_lock uses operations that are subject to race conditions and can lead to data loss, use with caution.") + msg="Using force uses operations that are subject to race conditions and can lead to data loss, use with caution.") template_dir = None if not remote_src: @@ -293,7 +293,7 @@ def run(self, tmp=None, task_vars=None): path = os.path.normpath(f"{self.tmp_dir}/ansible-zos-copy") rm_res = self._connection.exec_command(f"rm -rf {path}*") - if copy_res.get("note") and not force: + if copy_res.get("note") and not replace: result["note"] = copy_res.get("note") return result diff --git a/plugins/action/zos_fetch.py b/plugins/action/zos_fetch.py index bdbb82e826..b8b51363bb 100644 --- a/plugins/action/zos_fetch.py +++ b/plugins/action/zos_fetch.py @@ -39,7 +39,7 @@ display = Display() -def _update_result(result, src, dest, ds_type="USS", is_binary=False): +def _update_result(result, src, dest, ds_type="USS", binary=False): """ Helper function to update output result with the provided values """ data_set_types = { "PS": "Sequential", @@ -57,10 +57,10 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False): updated_result = dict((k, v) for k, v in result.items()) updated_result.update( { - "file": src, + "src": src, "dest": dest, "data_set_type": data_set_types[ds_type], - "is_binary": is_binary, + "binary": binary, } ) return updated_result @@ -121,7 +121,8 @@ def run(self, tmp=None, task_vars=None): dest = self._task.args.get('dest') encoding = self._task.args.get('encoding', None) flat = _process_boolean(self._task.args.get('flat'), default=False) - is_binary = 
_process_boolean(self._task.args.get('is_binary')) + fail_on_missing = _process_boolean(self._task.args.get('fail_on_missing'), default=True) + binary = _process_boolean(self._task.args.get('binary')) ignore_sftp_stderr = _process_boolean( self._task.args.get("ignore_sftp_stderr"), default=True ) @@ -186,29 +187,55 @@ def run(self, tmp=None, task_vars=None): task_vars=task_vars ) ds_type = fetch_res.get("ds_type") - src = fetch_res.get("file") + src = fetch_res.get("src") remote_path = fetch_res.get("remote_path") - - if fetch_res.get("msg"): - result["msg"] = fetch_res.get("msg") + # Create a dictionary that is a schema for the return values + result = dict( + src="", + dest="", + binary=False, + checksum="", + changed=False, + data_set_type="", + msg="", + stdout="", + stderr="", + stdout_lines=[], + stderr_lines=[], + rc=0, + encoding=new_module_args.get("encoding"), + ) + # Populate it with the modules response + result["src"] = fetch_res.get("src") + result["dest"] = fetch_res.get("dest") + result["binary"] = fetch_res.get("binary", False) + result["checksum"] = fetch_res.get("checksum") + result["changed"] = fetch_res.get("changed", False) + result["data_set_type"] = fetch_res.get("data_set_type") + result["msg"] = fetch_res.get("msg") + result["stdout"] = fetch_res.get("stdout") + result["stderr"] = fetch_res.get("stderr") + result["stdout_lines"] = fetch_res.get("stdout_lines") + result["stderr_lines"] = fetch_res.get("stderr_lines") + result["rc"] = fetch_res.get("rc", 0) + result["encoding"] = fetch_res.get("encoding") + + if fetch_res.get("failed", False): result["stdout"] = fetch_res.get("stdout") or fetch_res.get( "module_stdout" ) result["stderr"] = fetch_res.get("stderr") or fetch_res.get( "module_stderr" ) - result["stdout_lines"] = fetch_res.get("stdout_lines") - result["stderr_lines"] = fetch_res.get("stderr_lines") - result["rc"] = fetch_res.get("rc") result["failed"] = True return result - - elif fetch_res.get("note"): - result["note"] = 
fetch_res.get("note") + if "No data was fetched." in result["msg"]: + if fail_on_missing: + result["failed"] = True return result except Exception as err: - result["msg"] = "Failure during module execution" + result["msg"] = f"Failure during module execution {msg}" result["stderr"] = str(err) result["stderr_lines"] = str(err).splitlines() result["failed"] = True @@ -229,7 +256,6 @@ def run(self, tmp=None, task_vars=None): # For instance: If src is: USER.TEST.PROCLIB(DATA) # # and dest is: /tmp/, then updated dest would be /tmp/DATA # # ********************************************************** # - if os.path.sep not in self._connection._shell.join_path("a", ""): src = self._connection._shell._unquote(src) source_local = src.replace("\\", "/") @@ -290,15 +316,11 @@ def run(self, tmp=None, task_vars=None): try: if ds_type in SUPPORTED_DS_TYPES: if ds_type == "PO" and os.path.isfile(dest) and not fetch_member: - result[ - "msg" - ] = "Destination must be a directory to fetch a partitioned data set" + result["msg"] = "Destination must be a directory to fetch a partitioned data set" result["failed"] = True return result if ds_type == "GDG" and os.path.isfile(dest): - result[ - "msg" - ] = "Destination must be a directory to fetch a generation data group" + result["msg"] = "Destination must be a directory to fetch a generation data group" result["failed"] = True return result @@ -309,9 +331,10 @@ def run(self, tmp=None, task_vars=None): ignore_stderr=ignore_sftp_stderr, ) if fetch_content.get("msg"): - return fetch_content + result.update(fetch_content) + return result - if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary: + if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not binary: new_checksum = _get_file_checksum(dest) result["changed"] = local_checksum != new_checksum result["checksum"] = new_checksum @@ -339,7 +362,7 @@ def run(self, tmp=None, task_vars=None): finally: self._remote_cleanup(remote_path, ds_type, 
encoding) - return _update_result(result, src, dest, ds_type, is_binary=is_binary) + return _update_result(result, src, dest, ds_type, binary=binary) def _transfer_remote_content( self, dest, remote_path, src_type, ignore_stderr=False diff --git a/plugins/action/zos_job_submit.py b/plugins/action/zos_job_submit.py index 1f8cdf465a..944f5af212 100644 --- a/plugins/action/zos_job_submit.py +++ b/plugins/action/zos_job_submit.py @@ -45,16 +45,16 @@ def run(self, tmp=None, task_vars=None): module_args = self._task.args.copy() use_template = _process_boolean(module_args.get("use_template")) - location = module_args.get("location") - if use_template and location != "local": + remote_src = module_args.get("remote_src") + if use_template and remote_src: result.update(dict( failed=True, changed=False, - msg="Use of Jinja2 templates is only valid for local files. Location is set to '{0}' but should be 'local'".format(location) + msg="Use of Jinja2 templates is only valid for local files. remote_src is set to '{0}' but should be False".format(remote_src) )) return result - if location == "local": + if not remote_src: source = self._task.args.get("src", None) @@ -150,7 +150,7 @@ def run(self, tmp=None, task_vars=None): src=source_full, dest=dest_file, mode="0666", - force=True, + replace=True, encoding=module_args.get('encoding'), remote_src=False, ) diff --git a/plugins/action/zos_script.py b/plugins/action/zos_script.py index d3d348fa9c..11beb2bd08 100644 --- a/plugins/action/zos_script.py +++ b/plugins/action/zos_script.py @@ -92,8 +92,8 @@ def run(self, tmp=None, task_vars=None): copy_module_args = dict( src=script_path, dest=tempfile_path, - force=True, - is_binary=False, + replace=True, + binary=False, encoding=module_args.get('encoding'), use_template=module_args.get('use_template', False), template_parameters=module_args.get('template_parameters', dict()) diff --git a/plugins/action/zos_unarchive.py b/plugins/action/zos_unarchive.py index 9f5d9d347f..bf4fe17a70 100644 
--- a/plugins/action/zos_unarchive.py +++ b/plugins/action/zos_unarchive.py @@ -63,7 +63,7 @@ def run(self, tmp=None, task_vars=None): source = module_args.get("src") force = _process_boolean(module_args.get("force")) format = self._task.args.get("format") - format_name = format.get("name") + format_type = format.get("type") copy_module_args = dict() dest_data_set = format.get("dest_data_set") dest = "" @@ -71,12 +71,12 @@ def run(self, tmp=None, task_vars=None): source = os.path.expanduser(source) source = os.path.realpath(source) - if format_name in USS_SUPPORTED_FORMATS: + if format_type in USS_SUPPORTED_FORMATS: tmp_files = dest = self._execute_module( module_name="tempfile", module_args={}, task_vars=task_vars, ).get("path") - uss_format = format_name - elif format_name in MVS_SUPPORTED_FORMATS: + uss_format = format_type + elif format_type in MVS_SUPPORTED_FORMATS: if dest_data_set is None: dest_data_set = dict() tmp_hlq = module_args.get("tmp_hlq") if module_args.get("tmp_hlq") is not None else "" @@ -90,9 +90,9 @@ def run(self, tmp=None, task_vars=None): dest = cmd_res.get("stdout") if dest_data_set.get("space_primary") is None: dest_data_set.update(space_primary=5, space_type="m") - if format_name == 'terse': + if format_type == 'terse': dest_data_set.update(type='seq', record_format='fb', record_length=1024) - if format_name == 'xmit': + if format_type == 'xmit': dest_data_set.update(type='seq', record_format='fb', record_length=80) copy_module_args.update( @@ -100,8 +100,8 @@ def run(self, tmp=None, task_vars=None): src=source, dest=dest, dest_data_set=dest_data_set, - force=force, - is_binary=True, + replace=force, + binary=True, ) ) copy_task = self._task.copy() diff --git a/plugins/module_utils/better_arg_parser.py b/plugins/module_utils/better_arg_parser.py index e5dd8e975c..f4a52eee09 100644 --- a/plugins/module_utils/better_arg_parser.py +++ b/plugins/module_utils/better_arg_parser.py @@ -152,6 +152,7 @@ def __init__(self, arg_name, contents, 
resolved_args, arg_defs): # TODO: determine if we should optionally allow top-level args to be passed self.type_handlers = { "dict": self._dict_type, + "basic_dict": self._basic_dict_type, "list": self._list_type, "str": self._str_type, "bool": self._bool_type, @@ -160,6 +161,8 @@ def __init__(self, arg_name, contents, resolved_args, arg_defs): "data_set": self._data_set_type, "data_set_base": self._data_set_base_type, "data_set_member": self._data_set_member_type, + "member_name": self._member_name_type, + "identifier_name": self._identifier_name_type, "qualifier": self._qualifier_type, "qualifier_or_empty": self._qualifier_or_empty_type, "qualifier_pattern": self._qualifier_pattern_type, @@ -252,6 +255,32 @@ def _dict_type(self, contents, resolved_dependencies): self._assert_mutually_exclusive(contents) return contents + def _basic_dict_type(self, contents, resolve_dependencies): + """Resolver for basic dict type arguments. + + Parameters + ---------- + contents : dict + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + dict + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + if not isinstance(contents, dict): + raise ValueError('Invalid argument "{0}" for type "dict".'.format(contents)) + return contents + def _str_type(self, contents, resolve_dependencies): """Resolver for str type arguments. @@ -329,6 +358,72 @@ def _bool_type(self, contents, resolve_dependencies): raise ValueError('Invalid argument "{0}" for type "bool".'.format(contents)) return contents + def _member_name_type(self, contents, resolve_dependencies): + """Resolver for PDS/E member name type arguments. This is part of + zos_started_task member name validation. 
+ + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + if not fullmatch( + r"^[A-Z$#@]{1}[A-Z0-9$#@]{0,7}$", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument "{0}" for type "member_name".'.format(contents) + ) + return str(contents) + + def _identifier_name_type(self, contents, resolve_dependencies): + """Resolver for identifier name type arguments. This is part of + zos_started_task identifier name validation. + + Parameters + ---------- + contents : bool + The contents of the argument. + resolved_dependencies : dict + Contains all of the dependencies and their contents, + which have already been handled, + for use during current arguments handling operations. + + Returns + ------- + str + The arguments contents after any necessary operations. + + Raises + ------ + ValueError + When contents is invalid argument type. + """ + if not fullmatch( + r"^[A-Z]{1}[A-Z0-9$#@]{0,7}$", + str(contents), + IGNORECASE, + ): + raise ValueError( + 'Invalid argument "{0}" for type "identifier_name".'.format(contents) + ) + return str(contents) + def _path_type(self, contents, resolve_dependencies): """Resolver for path type arguments. 
@@ -1108,12 +1203,6 @@ def _add_alias(self, arg_name, arg_aliases=None, aliases=None): aliases = {} arg_aliases.append(arg_name) for alternate_name in arg_aliases: - if aliases.get(alternate_name, arg_name) != arg_name: - raise ValueError( - 'Conflicting aliases "{0}" and "{1}" found for name "{2}"'.format( - aliases.get(alternate_name), alternate_name, arg_name - ) - ) aliases[alternate_name] = arg_name return aliases diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index f5fd194481..63f6bbdd62 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -78,7 +78,7 @@ def _validate_path(path): return parsed_args.get("path") -def copy_uss_mvs(src, dest, is_binary=False): +def copy_uss_mvs(src, dest, binary=False): """Wrapper function for datasets.copy that handles possible exceptions that may occur. @@ -91,7 +91,7 @@ def copy_uss_mvs(src, dest, is_binary=False): Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether to perform a binary copy. Returns @@ -109,7 +109,7 @@ def copy_uss_mvs(src, dest, is_binary=False): "options": "" } - if is_binary: + if binary: copy_args["options"] = "-B" try: @@ -125,7 +125,7 @@ def copy_uss_mvs(src, dest, is_binary=False): return 0, "", "" -def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): +def copy_gdg2uss(src, dest, binary=False, asa_text=False): """Copy a whole GDG to a USS path. Parameters @@ -137,7 +137,7 @@ def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether the file to be copied contains binary data. 
asa_text : bool Whether the file to be copied contains ASA control @@ -155,7 +155,7 @@ def copy_gdg2uss(src, dest, is_binary=False, asa_text=False): "options": "" } - if is_binary or asa_text: + if binary or asa_text: copy_args["options"] = "-B" for gds in generations: @@ -209,7 +209,7 @@ def copy_vsam_ps(src, dest, tmphlq=None): return rc, out, err -def copy_asa_uss2mvs(src, dest, tmphlq=None, force_lock=False): +def copy_asa_uss2mvs(src, dest, tmphlq=None, force=False): """Copy a file from USS to an ASA sequential data set or PDS/E member. Parameters @@ -220,7 +220,7 @@ def copy_asa_uss2mvs(src, dest, tmphlq=None, force_lock=False): The MVS destination data set or member. tmphlq : str High Level Qualifier for temporary datasets. - force_lock : bool + force : bool Whether to open the destination in SHR mode. Returns @@ -236,7 +236,7 @@ def copy_asa_uss2mvs(src, dest, tmphlq=None, force_lock=False): # Removes escaping to execute this command dest = dest.replace('\\', '') src = src.replace('\\', '') - dest_dsp = "shr" if force_lock else "old" + dest_dsp = "shr" if force else "old" ocopy_cmd = "OCOPY INDD(DSSRC) OUTDD(DSTAR) TEXT" ocopy_dds = { diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 4640697252..80f7add084 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -171,6 +171,7 @@ def _job_not_found(job_id, owner, job_name, dd_name): job_not_found_msg = "with the name {0}".format(job_name.upper()) job = {} + job["job_not_found"] = True job["job_id"] = job_id job["job_name"] = job_name job["subsystem"] = None @@ -179,24 +180,36 @@ def _job_not_found(job_id, owner, job_name, dd_name): job["cpu_time"] = None job["execution_node"] = None job["origin_node"] = None + job["content_type"] = None + job["creation_date"] = None + job["creation_time"] = None + job["execution_time"] = None + job["job_class"] = None + job["svc_class"] = None + job["priority"] = None + job["asid"] = None + job["queue_position"] = None + 
job["program_name"] = None job["ret_code"] = {} job["ret_code"]["msg"] = None job["ret_code"]["code"] = None job["ret_code"]["msg_code"] = None job["ret_code"]["msg_txt"] = "The job {0} could not be found.".format(job_not_found_msg) + job["steps"] = [] - job["class"] = "" + job["class"] = None - job["ddnames"] = [] + job["dds"] = [] dd = {} - dd["ddname"] = dd_name - dd["record_count"] = "0" - dd["id"] = "" + dd["dd_name"] = dd_name + dd["record_count"] = 0 + dd["id"] = None dd["stepname"] = None - dd["procstep"] = "" - dd["byte_count"] = "0" - job["ddnames"].append(dd) + dd["procstep"] = None + dd["byte_count"] = 0 + dd["content"] = None + job["dds"].append(dd) jobs.append(job) @@ -357,8 +370,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, sysin=Fal job = {} job["job_id"] = entry.job_id job["job_name"] = entry.name - job["subsystem"] = "" - job["system"] = "" + job["subsystem"] = None + job["system"] = None job["owner"] = entry.owner job["cpu_time"] = None job["execution_node"] = None @@ -386,9 +399,9 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, sysin=Fal job["creation_time"] = str(entry.creation_datetime)[12:] job["queue_position"] = entry.queue_position job["program_name"] = entry.program_name - job["class"] = "" - job["ret_code"]["steps"] = [] - job["ddnames"] = [] + job["class"] = None + job["steps"] = [] + job["dds"] = [] job["duration"] = duration if hasattr(entry, "execution_time"): job["execution_time"] = entry.execution_time @@ -450,7 +463,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, sysin=Fal if dd_name not in single_dd["dd_name"]: continue else: - dd["ddname"] = single_dd["dd_name"] + dd["dd_name"] = single_dd["dd_name"] if "records" in single_dd: dd["record_count"] = single_dd["records"] @@ -499,10 +512,10 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, sysin=Fal dd["content"] = tmpcont.split("\n") - 
job["ret_code"]["steps"].extend(_parse_steps(tmpcont)) + job["steps"].extend(_parse_steps(tmpcont)) - job["ddnames"].append(dd) - if len(job["class"]) < 1: + job["dds"].append(dd) + if job["class"] is None: job["class"] = entry.job_class if job["system"] is None: @@ -551,7 +564,7 @@ def _ddname_pattern(contents, resolve_dependencies): re.IGNORECASE, ): raise ValueError( - 'Invalid argument type for "{0}". Expected "ddname_pattern"'.format( + 'Invalid argument type for "{0}". Expected "dd_name_pattern"'.format( contents ) ) diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 211a1f4789..b6faf31610 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -102,17 +102,18 @@ type: str persistent: description: - - Add/remove persistent entries to or from I(data_set_name) + - Add/remove persistent entries to or from I(target) - C(library) will not be persisted or removed if C(persistent=None) required: False type: dict suboptions: - data_set_name: + target: description: - The data set name used for persisting or removing a C(library) from the APF list. required: True type: str + aliases: [data_set_name] marker: description: - The marker line template. @@ -127,10 +128,10 @@ default: "/* {mark} ANSIBLE MANAGED BLOCK */" backup: description: - - Creates a backup file or backup data set for I(data_set_name), + - Creates a backup file or backup data set for I(target), including the timestamp information to ensure that you retrieve the original APF list - defined in I(data_set_name)". + defined in I(target)". - I(backup_name) can be used to specify a backup file name if I(backup=true). - The backup file name will be return on either success or failure @@ -142,7 +143,7 @@ description: - Specify the USS file name or data set name for the destination backup. 
- - If the source I(data_set_name) is a USS file or path, the + - If the source I(target) is a USS file or path, the backup_name name must be a file or path name, and the USS file or path must be an absolute path name. - If the source is an MVS data set, the backup_name must be @@ -237,18 +238,18 @@ library: SOME.SEQUENTIAL.DATASET force_dynamic: true persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Remove a library from the APF list and persistence zos_apf: state: absent library: SOME.SEQUENTIAL.DATASET volume: T12345 persistent: - data_set_name: SOME.PARTITIONED.DATASET(MEM) + target: SOME.PARTITIONED.DATASET(MEM) - name: Batch libraries with custom marker, persistence for the APF list zos_apf: persistent: - data_set_name: "SOME.PARTITIONED.DATASET(MEM)" + target: "SOME.PARTITIONED.DATASET(MEM)" marker: "/* {mark} PROG001 USR0010 */" batch: - library: SOME.SEQ.DS1 @@ -283,12 +284,22 @@ check_format> DYNAMIC or STATIC" returned: always type: str +stdout_lines: + description: List of strings containing individual lines from STDOUT. + returned: always + type: list stderr: description: The error messages from ZOAU command apfadm returned: always type: str sample: "BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already present in APF list." +stderr_lines: + description: List of strings containing individual lines from STDERR. 
+ returned: always + type: list + sample: ["BGYSC1310E ADD Error: Dataset COMMON.LINKLIB volume COMN01 is already + present in APF list."] rc: description: The return code from ZOAU command apfadm returned: always @@ -310,7 +321,7 @@ from ansible.module_utils._text import to_text from ansible.module_utils.basic import AnsibleModule from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( - better_arg_parser, zoau_version_checker, data_set, backup as Backup) + better_arg_parser, data_set, backup as Backup) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -436,9 +447,16 @@ def main(): type='dict', required=False, options=dict( - data_set_name=dict( + target=dict( type='str', required=True, + aliases=["data_set_name"], + deprecated_aliases=[ + dict( + name='data_set_name', + version='3.0.0', + collection_name='ibm.ibm_zos_core') + ], ), marker=dict( type='str', @@ -503,7 +521,7 @@ def main(): arg_type='dict', required=False, options=dict( - data_set_name=dict(arg_type='str', required=True), + target=dict(arg_type='str', required=True, aliases=["data_set_name"]), marker=dict(arg_type='str', required=False, default='/* {mark} ANSIBLE MANAGED BLOCK */'), backup=dict(arg_type='bool', default=False), backup_name=dict(arg_type='str', required=False, default=None), @@ -549,7 +567,7 @@ def main(): tmphlq = module.params.get('tmp_hlq') if persistent: - data_set_name = persistent.get('data_set_name') + target = persistent.get('target') backup = persistent.get('backup') marker = persistent.get('marker') if len(marker) > 71: @@ -558,13 +576,13 @@ def main(): if persistent.get('backup_name'): backup = persistent.get('backup_name') del persistent['backup_name'] - result['backup_name'] = backupOper(module, data_set_name, backup, tmphlq) + result['backup_name'] = backupOper(module, target, backup, tmphlq) del persistent['backup'] if state == "present": - persistent['addDataset'] = data_set_name + 
persistent['addDataset'] = target else: - persistent['delDataset'] = data_set_name - del persistent['data_set_name'] + persistent['delDataset'] = target + del persistent['target'] if operation: ret = zsystem.apf(opt=operation) @@ -580,26 +598,22 @@ def main(): del item['library'] # ignore=true is added so that it's ignoring in case of addition if already present # ignore=true is added so that it's ignoring in case the file is not in apf list while deletion - if zoau_version_checker.is_zoau_version_higher_than("1.3.4"): - ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent, ignore=True) - else: - ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent) + ret = zsystem.apf(batch=batch, forceDynamic=force_dynamic, persistent=persistent, ignore=True) else: if not library: module.fail_json(msg='library is required') # ignore=true is added so that it's ignoring in case of addition if already present # ignore=true is added so that it's ignoring in case the file is not in apf list while deletion - if zoau_version_checker.is_zoau_version_higher_than("1.3.4"): - ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent, ignore=True) - else: - ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent) + ret = zsystem.apf(opt=opt, dsname=library, volume=volume, sms=sms, forceDynamic=force_dynamic, persistent=persistent, ignore=True) operOut = ret.stdout_response operErr = ret.stderr_response operRc = ret.rc result['stderr'] = operErr + result['stderr_lines'] = operErr.split("\n") result['rc'] = operRc result['stdout'] = operOut + result['stdout_lines'] = operOut.split("\n") if operation != 'list' and operRc == 0: if operErr.strip(): @@ -631,6 +645,7 @@ def main(): except re.error: module.exit_json(**result) result['stdout'] = ds_list + result['stdout_lines'] = ds_list.split("\n") else: """ ZOAU 1.3 changed 
the output from apf, having the data set list inside a new "data" tag. @@ -638,6 +653,7 @@ def main(): """ try: result['stdout'] = json.dumps(data.get("data")) + result['stdout_lines'] = json.dumps(data.get("data")).split("\n") except Exception as e: err_msg = "An exception occurred. See stderr for more details." module.fail_json(msg=err_msg, stderr=to_text(e), rc=operErr) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index a02ec00a59..158250ad4a 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -51,7 +51,7 @@ type: dict required: false suboptions: - name: + type: description: - The compression format to use. type: str @@ -65,27 +65,27 @@ - terse - xmit - pax - format_options: + aliases: [ name ] + options: description: - Options specific to a compression format. type: dict required: false + aliases: [ format_options ] suboptions: - terse_pack: + spack: description: - Compression option for use with the terse format, - I(name=terse). + I(type=terse). - Pack will compress records in a data set so that the output results in lossless data compression. - Spack will compress records in a data set so the output results in complex data compression. - Spack will produce smaller output and take approximately 3 times longer than pack compression. - type: str + type: bool required: false - choices: - - pack - - spack + default: true xmit_log_data_set: description: - Provide the name of a data set to store xmit log output. @@ -97,7 +97,7 @@ - When providing the I(xmit_log_data_set) name, ensure there is adequate space. type: str - use_adrdssu: + adrdssu: description: - If set to true, the C(zos_archive) module will use Data Facility Storage Management Subsystem data set services @@ -105,6 +105,7 @@ portable format before using C(xmit) or C(terse). 
type: bool default: false + aliases: [ use_adrdssu ] dest: description: - The remote absolute path or data set where the archive should be @@ -358,7 +359,7 @@ retrieve to the controller and then zos_copy or zos_unarchive for copying to a remote or send to the remote and then unpack the archive respectively. - - When packing and using C(use_adrdssu) flag the module will take up to two + - When packing and using C(adrdssu) flag the module will take up to two times the space indicated in C(dest_data_set). - tar, zip, bz2 and pax are archived using python C(tarfile) library which uses the latest version available for each format, for compatibility when @@ -378,7 +379,7 @@ src: /tmp/archive/foo.txt dest: /tmp/archive/foo_archive_test.tar format: - name: tar + type: tar # Archive multiple files - name: Archive list of files into a zip @@ -388,7 +389,7 @@ - /tmp/archive/bar.txt dest: /tmp/archive/foo_bar_archive_test.zip format: - name: zip + type: zip # Archive one data set into terse - name: Archive data set into a terse @@ -396,7 +397,7 @@ src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse # Use terse with different options - name: Archive data set into a terse, specify pack algorithm and use adrdssu @@ -404,10 +405,10 @@ src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - terse_pack: "spack" - use_adrdssu: true + type: terse + options: + spack: true + adrdssu: true # Use a pattern to store - name: Archive data set pattern using xmit @@ -416,7 +417,7 @@ exclude_sources: "USER.ARCHIVE.EXCLUDE.*" dest: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit + type: xmit - name: Archive multiple GDSs into a terse zos_archive: @@ -426,25 +427,25 @@ - "USER.GDG(-2)" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Archive multiple data sets into a new GDS zos_archive: src: "USER.ARCHIVE.*" dest: 
"USER.GDG(+1)" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true - name: Encode the source data set into Latin-1 before archiving into a terse data set zos_archive: src: "USER.ARCHIVE.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -456,9 +457,9 @@ - "USER.ARCHIVE2.TEST" dest: "USER.ARCHIVE.RESULT.TRS" format: - name: terse - format_options: - use_adrdssu: true + type: terse + options: + adrdssu: true encoding: from: IBM-1047 to: ISO8859-1 @@ -467,6 +468,12 @@ ''' RETURN = r''' +dest: + description: + - The remote absolute path or data set where the archive was + created. + type: str + returned: always state: description: - The state of the input C(src). @@ -574,7 +581,7 @@ def get_archive_handler(module): The archive format for the module. """ - format = module.params.get("format").get("name") + format = module.params.get("format").get("type") if format in ["tar", "gz", "bz2", "pax"]: return TarArchive(module) elif format == "terse": @@ -705,7 +712,7 @@ def __init__(self, module): """ self.module = module self.dest = module.params['dest'] - self.format = module.params.get("format").get("name") + self.format = module.params.get("format").get("type") self.remove = module.params['remove'] self.changed = False self.errors = [] @@ -1179,7 +1186,7 @@ def __init__(self, module): ---------- original_checksums : str The SHA256 hash of the contents of input file. - use_adrdssu : bool + adrdssu : bool Whether to use Data Facility Storage Management Subsystem data set services program ADRDSSU to uncompress data sets or not. 
expanded_sources : list[str] @@ -1198,7 +1205,7 @@ def __init__(self, module): super(MVSArchive, self).__init__(module) self.tmphlq = module.params.get("tmp_hlq") self.original_checksums = self.dest_checksums() - self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") + self.adrdssu = module.params.get("format").get("options").get("adrdssu") self.expanded_sources = self.expand_mvs_paths(self.sources) self.expanded_exclude_sources = self.expand_mvs_paths(module.params['exclude']) self.sources = sorted(set(self.expanded_sources) - set(self.expanded_exclude_sources)) @@ -1339,18 +1346,6 @@ def create_dest_ds(self, name): """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) - # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) - # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) - # rc, out, err = self.module.run_command(cmd) - - # if not changed: - # self.module.fail_json( - # msg="Failed preparing {0} to be used as an archive".format(name), - # stdout=out, - # stderr=err, - # stdout_lines=cmd, - # rc=rc, - # ) return name def dump_into_temp_ds(self, temp_ds): @@ -1619,13 +1614,10 @@ def __init__(self, module): Compression option for use with the terse format. """ super(AMATerseArchive, self).__init__(module) - self.pack_arg = module.params.get("format").get("format_options").get("terse_pack") + spack = module.params.get("format").get("options").get("spack") # We store pack_ard in uppercase because the AMATerse command requires # it in uppercase. - if self.pack_arg is None: - self.pack_arg = "SPACK" - else: - self.pack_arg = self.pack_arg.upper() + self.pack_arg = "SPACK" if spack else "PACK" def add(self, src, archive): """Archive src into archive using AMATERSE program. 
@@ -1665,9 +1657,9 @@ def archive_targets(self): Raises ------ fail_json - To archive multiple source data sets, you must use option 'use_adrdssu=True'. + To archive multiple source data sets, you must use option 'adrdssu=True'. """ - if self.use_adrdssu: + if self.adrdssu: source, changed = self._create_dest_data_set( type="seq", record_format="u", @@ -1682,7 +1674,7 @@ def archive_targets(self): # If we don't use a adrdssu container we cannot pack multiple data sets if len(self.targets) > 1: self.module.fail_json( - msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + msg="To archive multiple source data sets, you must use option 'adrdssu=True'.") source = self.targets[0] dataset = data_set.MVSDataSet( name=self.dest, @@ -1714,7 +1706,7 @@ def __init__(self, module): The name of the data set to store xmit log output. """ super(XMITArchive, self).__init__(module) - self.xmit_log_data_set = module.params.get("format").get("format_options").get("xmit_log_data_set") + self.xmit_log_data_set = module.params.get("format").get("options").get("xmit_log_data_set") def add(self, src, archive): """Archive src into archive using TSO XMIT. @@ -1759,9 +1751,9 @@ def archive_targets(self): Raises ------ fail_json - To archive multiple source data sets, you must use option 'use_adrdssu=True'. + To archive multiple source data sets, you must use option 'adrdssu=True'. 
""" - if self.use_adrdssu: + if self.adrdssu: source, changed = self._create_dest_data_set( type="seq", record_format="u", @@ -1776,7 +1768,7 @@ def archive_targets(self): # If we don't use a adrdssu container we cannot pack multiple data sets if len(self.sources) > 1: self.module.fail_json( - msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") + msg="To archive multiple source data sets, you must use option 'adrdssu=True'.") source = self.sources[0] # dest = self.create_dest_ds(self.dest) dataset = data_set.MVSDataSet( @@ -1871,25 +1863,49 @@ def run_module(): format=dict( type='dict', options=dict( - name=dict( + type=dict( type='str', default='gz', - choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'], + aliases=['name'], + deprecated_aliases=[ + dict( + name='name', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], ), - format_options=dict( + options=dict( type='dict', required=False, + aliases=['format_options'], + deprecated_aliases=[ + dict( + name='format_options', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], options=dict( - terse_pack=dict( - type='str', - choices=['pack', 'spack'], + spack=dict( + type='bool', + default=True, ), xmit_log_data_set=dict( type='str', ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, + aliases=['use_adrdssu'], + deprecated_aliases=[ + dict( + name='use_adrdssu', + version='3.0.0', + collection_name='ibm.ibm_zos_core' + ) + ], ) ), ), @@ -1966,41 +1982,44 @@ def run_module(): format=dict( type='dict', options=dict( - name=dict( + type=dict( type='str', default='gz', - choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] + choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'], + aliases=['name'], ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( - terse_pack=dict( - type='str', + spack=dict( + type='bool', required=False, - 
choices=['pack', 'spack'], + default=True, ), xmit_log_data_set=dict( type='str', required=False, ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, + aliases=['use_adrdssu'], ) ), default=dict( - terse_pack="spack", + spack=True, xmit_log_data_set="", - use_adrdssu=False), + adrdssu=False), + aliases=['format_options'], ), ), default=dict( - name="", - format_options=dict( - terse_pack="spack", + type="", + options=dict( + spack=True, xmit_log_data_set="", - use_adrdssu=False + adrdssu=False ) ), ), diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index a3261ae70e..6fcd45abae 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -33,6 +33,54 @@ backups can be restored to systems where Ansible and ZOAU are not available. Conversely, dumps created with ADRDSSU and AMATERSE can be restored using this module. options: + access: + description: + - Specifies how the module will access data sets and z/OS UNIX files when + performing a backup or restore operation. + type: dict + required: false + suboptions: + share: + description: + - Specifies that the module allow data set read access to other programs + while backing up or restoring. + - I(share) and C(full_volume) are mutually exclusive; you cannot use both. + - Option I(share)is conditionally supported for I(operation=backup) or + I(operation=restore). + - When I(operation=backup), and source backup is a VSAM data set, the + option is only supported for VSAM data sets which are not defined with + VSAM SHAREOPTIONS (1,3) or (1,4). + - When I(operation=restore), and restore target is a VSAM data set or + PDSE data set, this option is not supported. Both data set types will + be accessed exlusivly preventing reading or writing to the VSAM, PDSE, + or PDSE members. + - The SHAREOPTIONS for VSAM data sets. 
+ - (1) the data set can be shared by multiple programs for read-only + processing, or a single program for read and write processing. + - (2) the data set can be accessed by multiple programs for read-only + processing, and can also be accessed by a program for write processing. + - (3) the data set can be shared by multiple programs where each + program is responsible for maintaining both read and write data integrity. + - (4) the data set can be shared by multiple programs where each program is + responsible for maintaining both read and write data integrity differing + from (3) in that I/O buffers are updated for each request. + type: bool + required: false + default: false + auth: + description: + - I(auth=true) allows you to act as an administrator, where it will disable + checking the current users privileges for z/OS UNIX files, data sets and + catalogs. + - This is option is supported both, I(operation=backup) and I(operation=restore). + - If you are not authorized to use this option, the module ends with an + error message. + - Some authorization checking for data sets is unavoidable, when when I(auth) + is specified because some checks are initiated by services and programs + invoked by this module which can not be bypassed. + type: bool + required: false + default: false operation: description: - Used to specify the operation to perform. @@ -153,24 +201,84 @@ default: True sms_storage_class: description: - - When I(operation=restore), specifies the storage class to use. The storage class will - also be used for temporary data sets created during restore process. - - When I(operation=backup), specifies the storage class to use for temporary data sets - created during backup process. - - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS - system's Automatic Class Selection (ACS) routines will be used. 
- type: str - required: False - sms_management_class: + - When I(operation=backup), enables compression of partitioned data sets using system-level + compression features. If supported, this may utilize zEDC hardware compression. + - This option can reduce the size of the temporary dataset generated during backup operations + either before the AMATERSE step when I(terse) is True or the resulting backup + when I(terse) is False. + type: bool + default: False + terse: description: - - When I(operation=restore), specifies the management class to use. The management class - will also be used for temporary data sets created during restore process. - - When I(operation=backup), specifies the management class to use for temporary data sets - created during backup process. - - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS - system's Automatic Class Selection (ACS) routines will be used. - type: str - required: False + - When I(operation=backup), executes an AMATERSE step to compress and pack the temporary data set + for the backup. This creates a backup with a format suitable for transferring off-platform. + - If I(operation=backup) and if I(dataset=False) then option I(terse) must be True. + type: bool + default: True + sms: + description: + - Specifies how System Managed Storage (SMS) interacts with the storage class + and management class when either backup or restore operations are occurring. + - Storage class contains performance and availability attributes related to the storage occupied by the data set. + A data set that has a storage class assigned to it is defined as an 'SMS-managed' data set. + - Management class contains the data set attributes related to the migration and backup of the data set and the + expiration date of the data set. A management class can be assigned only to a data set that also has a + storage class assigned. 
+ type: dict + required: false + suboptions: + storage_class: + description: + - When I(operation=restore), specifies the storage class to use. The storage class will + also be used for temporary data sets created during restore process. + - When I(operation=backup), specifies the storage class to use for temporary data sets + created during backup process. + - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS + system's Automatic Class Selection (ACS) routines will be used. + type: str + required: False + management_class: + description: + - When I(operation=restore), specifies the management class to use. The management class + will also be used for temporary data sets created during restore process. + - When I(operation=backup), specifies the management class to use for temporary data sets + created during backup process. + - If neither of I(sms_storage_class) or I(sms_management_class) are specified, the z/OS + system's Automatic Class Selection (ACS) routines will be used. + type: str + required: False + disable_automatic_class: + description: + - Specifies that the automatic class selection (ACS) routines will not be + used to determine the target data set class names for the provided list. + - The list must contain fully or partially qualified data set names. + - To include all selected data sets, "**" in a list. + - You must have READ access to RACF FACILITY class profile + `STGADMIN.ADR.RESTORE.BYPASSACS` to use this option. + type: list + elements: str + required: false + default: [] + disable_automatic_storage_class: + description: + - Specifies that automatic class selection (ACS) routines will not be used + to determine the source data set storage class. + - Enabling I(disable_automatic_storage_class) ensures ACS is null. + - I(storage_class) and I(disable_automatic_storage_class) are mutually exclusive; you cannot use both. 
+ - The combination of I(disable_automatic_storage_class) and C(disable_automatic_class=[dsn,dsn1,...]) + ensures the selected data sets will not be SMS-managed. + type: bool + required: false + default: false + disable_automatic_management_class: + description: + - Specifies that automatic class selection (ACS) routines will not be used + to determine the source data set management class. + - Enabling I(disable_automatic_storage_class) ensures ACS is null. + - I(management_class) and I(disable_automatic_management_class) are mutually exclusive; you cannot use both. + type: bool + required: false + default: false space: description: - If I(operation=backup), specifies the amount of space to allocate for the backup. @@ -217,6 +325,19 @@ not be identified, the value C(TMPHLQ) is used. required: false type: str + index: + description: + - When C(operation=backup) specifies that for any VSAM cluster backup, the backup must also contain + all the associated alternate index (AIX®) clusters and paths. + - When C(operation=restore) specifies that for any VSAM cluster dumped with the SPHERE keyword, + the module must also restore all associated AIX® clusters and paths. + - The alternate index is a VSAM function that allows logical records of a + KSDS or ESDS to be accessed sequentially and directly by more than one key + field. The cluster that has the data is called the base cluster. An + alternate index cluster is then built from the base cluster. + type: bool + required: false + default: false attributes: action: @@ -372,9 +493,54 @@ operation: restore volume: MYVOL2 backup_name: /tmp/temp_backup.dzp - sms_storage_class: DB2SMS10 - sms_management_class: DB2SMS10 + sms: + storage_class: DB2SMS10 + management_class: DB2SMS10 + +- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp. + Disable for all datasets SMS storage and management classes data sets. 
+ zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: /tmp/temp_backup.dzp + sms: + disable_automatic_class: + - "**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + +- name: Restore data sets from backup stored in the MVS file MY.BACKUP.DZP + Disable for al some datasets SMS storage and management classes data sets. + zos_backup_restore: + operation: restore + volume: MYVOL2 + backup_name: MY.BACKUP.DZP + sms: + disable_automatic_class: + - "ANSIBLE.TEST.**" + - "**.ONE.**" + disable_automatic_storage_class: true + disable_automatic_management_class: true + +- name: Backup all data sets matching the pattern USER.VSAM.** to z/OS UNIX + file /tmp/temp_backup.dzp and ensure the VSAM alternate index are preserved. + zos_backup_restore: + operation: backup + data_sets: + include: user.vsam.** + backup_name: /tmp/temp_backup.dzp + index: true + +- name: Restore data sets from backup stored in the UNIX file /tmp/temp_backup.dzp + whether they exist or not and do so as authorized disabling any security checks. 
+ zos_backup_restore: + operation: restore + backup_name: /tmp/temp_backup.dzp + access: + auth: true + share: true """ + RETURN = r""" changed: description: @@ -428,6 +594,14 @@ def main(): """ result = dict(changed=False, message="", backup_name="") module_args = dict( + access=dict( + type='dict', + required=False, + options=dict( + share=dict(type='bool', default=False), + auth=dict(type='bool', default=False) + ) + ), operation=dict(type="str", required=True, choices=["backup", "restore"]), data_sets=dict( required=False, @@ -451,6 +625,8 @@ def main(): sms_management_class=dict(type="str", required=False), hlq=dict(type="str", required=False), tmp_hlq=dict(type="str", required=False), + # 2.0 redesign extra values for ADRDSSU keywords + index=dict(type="bool", required=False, default=False), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=False) @@ -472,6 +648,17 @@ def main(): sms_management_class = params.get("sms_management_class") hlq = params.get("hlq") tmp_hlq = params.get("tmp_hlq") + sphere = params.get("index") + access = params.get('access') + + if sms and bool(sms.get("storage_class")) and sms.get("disable_automatic_storage_class"): + module.fail_json(msg="storage_class and disable_automatic_storage_class are mutually exclusive, only one can be use by operation.") + + if sms and bool(sms.get("management_class")) and sms.get("disable_automatic_management_class"): + module.fail_json(msg="management_class and disable_automatic_management_class are mutually exclusive, only one can be use by operation.") + + if access and access.get("share") and full_volume: + module.fail_json(msg="access.share cannot be used with full_volume. 
These options are mutually exclusive.") if operation == "backup": backup( @@ -487,9 +674,10 @@ def main(): recover=recover, space=space, space_type=space_type, - sms_storage_class=sms_storage_class, - sms_management_class=sms_management_class, + sms=sms, tmp_hlq=tmp_hlq, + sphere=sphere, + access=access, ) else: restore( @@ -504,9 +692,10 @@ def main(): hlq=hlq, space=space, space_type=space_type, - sms_storage_class=sms_storage_class, - sms_management_class=sms_management_class, + sms=sms, tmp_hlq=tmp_hlq, + sphere=sphere, + access=access, ) result["backup_name"] = backup_name result["changed"] = True @@ -550,6 +739,14 @@ def parse_and_validate_args(params): The updated params after additional parsing and validation. """ arg_defs = dict( + access=dict( + type='dict', + required=False, + options=dict( + share=dict(type='bool', default=False), + auth=dict(type='bool', default=False) + ) + ), operation=dict(type="str", required=True, choices=["backup", "restore"]), data_sets=dict( required=False, @@ -583,6 +780,8 @@ def parse_and_validate_args(params): sms_management_class=dict(type=sms_type, required=False), hlq=dict(type=hlq_type, default=None, dependencies=["operation"]), tmp_hlq=dict(type=hlq_type, required=False), + # 2.0 redesign extra values for ADRDSSU keywords + index=dict(type="bool", required=False, default=False), ) parsed_args = BetterArgParser(arg_defs).parse_args(params) @@ -605,9 +804,10 @@ def backup( recover, space, space_type, - sms_storage_class, - sms_management_class, + sms, tmp_hlq, + sphere, + access, ): """Backup data sets or a volume to a new data set or unix file. @@ -637,10 +837,8 @@ def backup( Specifies the amount of space to allocate for the backup. space_type : str The unit of measurement to use when defining data set space. - sms_storage_class : str - Specifies the storage class to use. - sms_management_class : str - Specifies the management class to use. 
+ sms : dict + Specifies how System Managed Storage (SMS) interacts with the storage class. tmp_hlq : str Specifies the tmp hlq to temporary datasets. @@ -662,9 +860,10 @@ def restore( hlq, space, space_type, - sms_storage_class, - sms_management_class, + sms, tmp_hlq, + sphere, + access, ): """Restore data sets or a volume from the backup. @@ -696,12 +895,14 @@ def restore( created during the restore process. space_type : str The unit of measurement to use when defining data set space. - sms_storage_class : str - Specifies the storage class to use. - sms_management_class : str - Specifies the management class to use. + sms : dict + Specifies how System Managed Storage (SMS) interacts with the storage class. tmp_hlq : str Specifies the tmp hlq to temporary datasets. + sphere : dict + Specifies ADRDSSU keywords that is passed directly to the dunzip utility. + access : dict + Specifies keywords for share and administration permission. Raises ------ @@ -729,6 +930,70 @@ def restore( ) +def set_adrdssu_keywords(sphere, sms=None, access=None): + """Set the values for special keywords, dunzip use key value for most special words. + + Parameters + ---------- + sms : dict + Dictionary of key value of management an storage class. + sphere : bool + Value if sphere will be use on dictionary for VSAM. + access : dict + Dictionary of key values for management classes. + Returns + ------- + keywords : dict + Dictionary with key value paris. 
+ """ + keywords = {} + + if sphere: + keywords.update(sphere=None) + + if sms: + if sms.get("disable_automatic_management_class"): + sms["management_class"] = "NULLMGMTCLAS" + + if sms.get("disable_automatic_storage_class"): + sms["storage_class"] = "NULLSTORCLAS" + + if len(sms.get("disable_automatic_class")) > 0: + bypassacs = set_bypassacs_str(sms.get("disable_automatic_class")) + keywords.update(bypass_acs=bypassacs) + + if access: + if access.get("auth"): + keywords.update(ADMINISTRATOR="ADMINistrator") + + if access.get("share"): + keywords.update(SHARE="SHAre") + + return keywords + + +def set_bypassacs_str(ds): + """_summary_ + + Parameters + ---------- + ds : list + List of datasets to be use. + + Returns + ------- + str : Datasets on str format. + """ + datasets = "" + if len(ds) > 0: + for dataset in ds: + if dataset == "**": + return "**" + datasets += f"{datasets} " + + return datasets + + def get_real_rc(output): """Parse out the final RC from MVS program output. @@ -1014,8 +1279,8 @@ def to_dzip_args(**kwargs): if kwargs.get("sms_storage_class"): zoau_args["storage_class_name"] = kwargs.get("sms_storage_class") - if kwargs.get("sms_management_class"): - zoau_args["management_class_name"] = kwargs.get("sms_management_class") + if kwargs.get("terse"): + zoau_args["terse"] = kwargs.get("terse") if kwargs.get("space"): size = str(kwargs.get("space")) @@ -1026,6 +1291,19 @@ def to_dzip_args(**kwargs): if kwargs.get("tmp_hlq"): zoau_args["tmphlq"] = str(kwargs.get("tmp_hlq")) + sms = kwargs.get("sms") + keywords = set_adrdssu_keywords(sphere=kwargs.get("sphere"), sms=sms) + + if sms: + if sms.get("storage_class"): + zoau_args["storage_class_name"] = sms.get("storage_class") + + if sms.get("management_class"): + zoau_args["management_class_name"] = sms.get("management_class") + + if keywords: + zoau_args["keywords"] = keywords + return zoau_args @@ -1065,12 +1343,6 @@ def to_dunzip_args(**kwargs): zoau_args["overwrite"] = kwargs.get("overwrite") 
sms_specified = False - if kwargs.get("sms_storage_class"): - zoau_args["storage_class_name"] = kwargs.get("sms_storage_class") - - if kwargs.get("sms_management_class"): - zoau_args["management_class_name"] = kwargs.get("sms_management_class") - if sms_specified: zoau_args["sms_for_tmp"] = True @@ -1089,6 +1361,37 @@ def to_dunzip_args(**kwargs): zoau_args["high_level_qualifier"] = str(kwargs.get("tmp_hlq")) zoau_args["keep_original_hlq"] = False + sms = kwargs.get("sms") + access = kwargs.get("access") + keywords = set_adrdssu_keywords(sphere=kwargs.get("sphere")) + + if sms: + if sms.get("sms_storage_class"): + zoau_args["storage_class_name"] = sms.get("storage_class") + + if sms.get("sms_management_class"): + zoau_args["management_class_name"] = sms.get("management_class") + + if sms.get("disable_automatic_management_class"): + zoau_args["null_management_class"] = sms.get("disable_automatic_management_class") + + if sms.get("disable_automatic_storage_class"): + zoau_args["null_storage_class"] = sms.get("disable_automatic_storage_class") + + if len(sms.get("disable_automatic_class")) > 0: + bypassacs = set_bypassacs_str(ds=sms.get("disable_automatic_class")) + zoau_args["bypass_acs"] = bypassacs + + if access: + if access.get("auth"): + zoau_args['admin'] = access.get("auth") + + if access.get("share"): + zoau_args['share'] = access.get("share") + + if keywords: + zoau_args["keywords"] = keywords + return zoau_args diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 31f70f6872..fce846d006 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -81,6 +81,7 @@ - Choices are EOF or '*regex*'. - Default is EOF. required: false + aliases: ['after'] type: str insertbefore: description: @@ -92,6 +93,7 @@ at the end of the file. - Choices are BOF or '*regex*'. 
required: false + aliases: ['before'] type: str marker_begin: description: @@ -346,6 +348,14 @@ returned: failure type: str sample: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines: + description: List of strings containing individual lines from stdout. + returned: failure + type: list +stderr_lines: + description: List of strings containing individual lines from stderr. + returned: failure + type: list rc: description: The return code from ZOAU dmod when json.loads() fails to parse the result from dmod returned: failure @@ -511,10 +521,12 @@ def main(): aliases=['content'] ), insertafter=dict( - type='str' + type='str', + aliases=['after'], ), insertbefore=dict( - type='str' + type='str', + aliases=['before'], ), marker_begin=dict( type='str', @@ -560,8 +572,8 @@ def main(): state=dict(arg_type='str', default='present', choices=['absent', 'present']), marker=dict(arg_type='str', default='# {mark} ANSIBLE MANAGED BLOCK', required=False), block=dict(arg_type='str', default='', aliases=['content'], required=False), - insertafter=dict(arg_type='str', required=False), - insertbefore=dict(arg_type='str', required=False), + insertafter=dict(arg_type='str', required=False, aliases=['after'],), + insertbefore=dict(arg_type='str', required=False, aliases=['before'],), marker_begin=dict(arg_type='str', default='BEGIN', required=False), marker_end=dict(arg_type='str', default='END', required=False), encoding=dict(arg_type='str', default='IBM-1047', required=False), @@ -572,7 +584,16 @@ def main(): mutually_exclusive=[['insertbefore', 'insertafter']], indentation=dict(arg_type='int', default=0, required=False) ) - result = dict(changed=False, cmd='', found=0) + result = dict( + changed=False, + cmd='', + found=0, + stdout='', + stdout_lines=[], + stderr='', + stderr_lines=[], + rc=0, + ) try: parser = better_arg_parser.BetterArgParser(arg_defs) parsed_args = parser.parse_args(module.params) @@ -665,10 +686,17 @@ def main(): # The triple 
double quotes is required for special characters (/_) been scape ret = json.loads("""{0}""".format(stdout)) except Exception: - messageDict = dict(msg="ZOAU dmod return content is NOT in json format", stdout=str(stdout), stderr=str(stderr), rc=rc) - if result.get('backup_name'): - messageDict['backup_name'] = result['backup_name'] - module.fail_json(**messageDict) + result.update( + dict( + msg="ZOAU dmod return content is NOT in json format", + stdout=str(stdout), + stdout_lines=stdout.splitlines(), + stderr=str(stderr), + stderr_lines=stderr.splitlines(), + rc=rc + ) + ) + module.fail_json(**result) result['cmd'] = ret['data']['commands'] result['changed'] = ret['data']['changed'] diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 0d2ac02134..a6a2e43126 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -44,7 +44,7 @@ - If neither C(src) or C(dest) have record format Fixed Block with ANSI format (FBA) or Variable Block with ANSI format (VBA), the module will fail. - - This option is only valid for text files. If C(is_binary) is C(true) + - This option is only valid for text files. If C(binary) is C(true) or C(executable) is C(true) as well, the module will fail. type: bool default: false @@ -114,7 +114,7 @@ process outlined here and in the C(volume) option. - If C(dest) is a nonexistent data set, the attributes assigned will depend on the type of C(src). If C(src) is a USS file, C(dest) will have a Fixed Block (FB) record format and the - remaining attributes will be computed. If I(is_binary=true), C(dest) will have a Fixed Block + remaining attributes will be computed. If I(binary=true), C(dest) will have a Fixed Block (FB) record format with a record length of 80, block size of 32720, and the remaining attributes will be computed. 
If I(executable=true),C(dest) will have an Undefined (U) record format with a record length of 0, block size of 32760, and the remaining attributes will be @@ -154,7 +154,7 @@ - If C(encoding) is not provided, the module determines which local and remote charsets to convert the data from and to. Note that this is only done for text data and not binary data. - - Only valid if C(is_binary) is false. + - Only valid if C(binary) is false. type: dict required: false suboptions: @@ -176,7 +176,7 @@ that is not available, then the value C(TMPHLQ) is used. required: false type: str - force: + replace: description: - If set to C(true) and the remote file or data set C(dest) is empty, the C(dest) will be reused. @@ -193,14 +193,14 @@ type: bool default: false required: false - force_lock: + force: description: - By default, when C(dest) is a MVS data set and is being used by another - process with DISP=SHR or DISP=OLD the module will fail. Use C(force_lock) + process with DISP=SHR or DISP=OLD the module will fail. Use C(force) to bypass DISP=SHR and continue with the copy operation. - If set to C(true) and destination is a MVS data set opened by another process then zos_copy will try to copy using DISP=SHR. - - Using C(force_lock) uses operations that are subject to race conditions + - Using C(force) uses operations that are subject to race conditions and can lead to data loss, use with caution. - If a data set member has aliases, and is not a program object, copying that member to a dataset that is in use will result in @@ -225,13 +225,13 @@ required: false default: true version_added: "1.4.0" - is_binary: + binary: description: - If set to C(true), indicates that the file or data set to be copied is a binary file or data set. - - When I(is_binary=true), no encoding conversion is applied to the content, + - When I(binary=true), no encoding conversion is applied to the content, all content transferred retains the original state. 
- - Use I(is_binary=true) when copying a Database Request Module (DBRM) to + - Use I(binary=true) when copying a Database Request Module (DBRM) to retain the original state of the serialized SQL statements of a program. type: bool default: false @@ -656,7 +656,7 @@ zos_copy: src: /path/to/binary/file dest: HLQ.SAMPLE.PDSE(MEMBER) - is_binary: true + binary: true - name: Copy a sequential data set to a PDS member zos_copy: @@ -682,14 +682,14 @@ src: HLQ.SAMPLE.PDSE dest: HLQ.EXISTING.PDSE remote_src: true - force: true + replace: true - name: Copy PDS member to a new PDS member. Replace if it already exists zos_copy: src: HLQ.SAMPLE.PDSE(SRCMEM) dest: HLQ.NEW.PDSE(DESTMEM) remote_src: true - force: true + replace: true - name: Copy a USS file to a PDSE member. If PDSE does not exist, allocate it zos_copy: @@ -897,7 +897,7 @@ sample: file note: description: A note to the user after module terminates. - returned: When ``force=true`` and ``dest`` exists + returned: When ``replace=true`` and ``dest`` exists type: str sample: No data was copied msg: @@ -986,12 +986,12 @@ class CopyHandler(object): def __init__( self, module, - is_binary=False, + binary=False, executable=False, aliases=False, asa_text=False, backup_name=None, - force_lock=False, + force=False, identical_gdg_copy=False, tmphlq=None ): @@ -1005,7 +1005,7 @@ def __init__( Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether the file or data set to be copied contains binary data. executable : bool @@ -1018,7 +1018,7 @@ def __init__( backup_name : str The USS path or data set name of destination backup. - force_lock : str + force : str Whether the dest data set should be copied into using disp=shr when is opened by another process. @@ -1030,7 +1030,7 @@ def __init__( module : AnsibleModule The AnsibleModule object from currently running module. - is_binary : bool + binary : bool Whether the file or data set to be copied contains binary data. 
executable : bool @@ -1043,7 +1043,7 @@ def __init__( backup_name : str The USS path or data set name of destination backup. - force_lock : str + force : str Whether the dest data set should be copied into using disp=shr when is opened by another process. @@ -1051,12 +1051,12 @@ def __init__( High Level Qualifier for temporary datasets. """ self.module = module - self.is_binary = is_binary + self.binary = binary self.executable = executable self.asa_text = asa_text self.aliases = aliases self.backup_name = backup_name - self.force_lock = force_lock + self.force = force self.identical_gdg_copy = identical_gdg_copy self.tmphlq = tmphlq @@ -1110,7 +1110,7 @@ def copy_to_seq( copy_args["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(new_src, dest, tmphlq=self.tmphlq, force_lock=self.force_lock) + response = copy.copy_asa_uss2mvs(new_src, dest, tmphlq=self.tmphlq, force=self.force) if response.rc != 0: raise CopyOperationError( @@ -1122,11 +1122,11 @@ def copy_to_seq( else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. - if self.is_binary or self.asa_text: + if self.binary or self.asa_text: copy_args["options"] = "-B" try: - datasets.copy(new_src, dest, force=self.force_lock, **copy_args) + datasets.copy(new_src, dest, force=self.force, **copy_args) except zoau_exceptions.ZOAUException as copy_exception: raise CopyOperationError( msg="Unable to copy source {0} to {1}".format(new_src, dest), @@ -1150,7 +1150,7 @@ def copy_to_vsam(self, src, dest): CopyOperationError When REPRO fails to copy the data set. 
""" - out_dsp = "shr" if self.force_lock else "old" + out_dsp = "shr" if self.force else "old" dds = {"OUT": "{0},{1}".format(dest.upper(), out_dsp)} repro_cmd = """ REPRO - INDATASET('{0}') - @@ -1189,7 +1189,7 @@ def copy_to_gdg(self, src, dest): copy_args = { "options": "" } - if self.is_binary or self.asa_text: + if self.binary or self.asa_text: copy_args["options"] = "-B" success = True @@ -1577,7 +1577,7 @@ class USSCopyHandler(CopyHandler): def __init__( self, module, - is_binary=False, + binary=False, executable=False, asa_text=False, aliases=False, @@ -1598,7 +1598,7 @@ def __init__( common_file_args : dict Mode, group and owner information to be applied to destination file. - is_binary : bool + binary : bool Whether the file to be copied contains binary data. backup_name : str The USS path or data set name of destination backup. @@ -1613,7 +1613,7 @@ def __init__( """ super().__init__( module, - is_binary=is_binary, + binary=binary, executable=executable, asa_text=asa_text, aliases=aliases, @@ -1630,7 +1630,7 @@ def copy_to_uss( src_ds_type, src_member, member_name, - force, + replace, content_copy, ): """Copy a file or data set to a USS location. @@ -1652,7 +1652,7 @@ def copy_to_uss( Whether src is a data set member. member_name : str The name of the source data set member. - force : bool + replace : bool Whether to copy files to an already existing directory. content_copy : bool Whether copy is using content option or not. 
@@ -1696,7 +1696,7 @@ def copy_to_uss( dest = self._copy_to_file(src, dest, content_copy, conv_path) changed_files = None else: - dest, changed_files = self._copy_to_dir(src, dest, conv_path, force) + dest, changed_files = self._copy_to_dir(src, dest, conv_path, replace) if self.common_file_args is not None: mode = self.common_file_args.get("mode") @@ -1746,8 +1746,8 @@ def _copy_to_file(self, src, dest, content_copy, conv_path): dest = os.path.join(validation.validate_safe_path(dest), validation.validate_safe_path(src_path)) new_src = conv_path or src try: - if self.is_binary: - copy.copy_uss_mvs(new_src, dest, is_binary=True) + if self.binary: + copy.copy_uss_mvs(new_src, dest, binary=True) else: opts = dict() opts["options"] = "" @@ -1779,7 +1779,7 @@ def _copy_to_dir( src_dir, dest_dir, conv_path, - force + replace ): """Helper function to copy a USS directory to another USS directory. If the path for dest_dir does not end with a trailing slash ("/"), @@ -1793,7 +1793,7 @@ def _copy_to_dir( USS dest directory. conv_path : str Path to the converted source directory. - force :bool + replace :bool Whether to copy files to an already existing directory. Returns @@ -1818,7 +1818,7 @@ def _copy_to_dir( if copy_directory: dest = os.path.join(validation.validate_safe_path(dest_dir), validation.validate_safe_path(os.path.basename(os.path.normpath(src_dir)))) # dest = shutil.copytree(new_src_dir, dest, dirs_exist_ok=force) - dest = self.copy_tree(new_src_dir, dest, dirs_exist_ok=force) + dest = self.copy_tree(new_src_dir, dest, dirs_exist_ok=replace) # Restoring permissions for preexisting files and subdirectories. 
for filepath, permissions in original_permissions: @@ -1992,7 +1992,7 @@ def _mvs_copy_to_uss( result = copy.copy_gdg2uss( src, dest, - is_binary=self.is_binary, + binary=self.binary, asa_text=self.asa_text ) @@ -2024,7 +2024,7 @@ def _mvs_copy_to_uss( copy.copy_uss_mvs( src, dest, - is_binary=self.is_binary + binary=self.binary ) except CopyOperationError as err: raise err @@ -2036,12 +2036,12 @@ class PDSECopyHandler(CopyHandler): def __init__( self, module, - is_binary=False, + binary=False, executable=False, aliases=False, asa_text=False, backup_name=None, - force_lock=False, + force=False, tmphlq=None ): """ Utility class to handle copying to partitioned data sets or @@ -2055,7 +2055,7 @@ def __init__( Keyword Parameters ------------------ - is_binary : bool + binary : bool Whether the data set to be copied contains binary data. backup_name : str @@ -2065,12 +2065,12 @@ def __init__( """ super().__init__( module, - is_binary=is_binary, + binary=binary, executable=executable, aliases=aliases, asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock, + force=force, tmphlq=tmphlq ) @@ -2121,7 +2121,7 @@ def copy_to_pdse( path, dirs, files = next(os.walk(new_src)) src_members = [ - os.path.normpath("{0}/{1}".format(path, file)) if (self.is_binary or self.executable) + os.path.normpath("{0}/{1}".format(path, file)) if (self.binary or self.executable) else normalize_line_endings("{0}/{1}".format(path, file), encoding) for file in files ] @@ -2243,16 +2243,16 @@ def copy_to_member( opts["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(src, dest, tmphlq=self.tmphlq, force_lock=self.force_lock) + response = copy.copy_asa_uss2mvs(src, dest, tmphlq=self.tmphlq, force=self.force) rc, out, err = response.rc, response.stdout_response, response.stderr_response else: # While ASA files are just text files, we do a binary copy # so dcp doesn't introduce any additional blanks or newlines. 
- if self.is_binary or self.asa_text: + if self.binary or self.asa_text: opts["options"] = "-B" try: - rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, force=self.force_lock, **opts) + rc = datasets.copy(src, dest, alias=self.aliases, executable=self.executable, force=self.force, **opts) out = "" err = "" except zoau_exceptions.ZOAUException as copy_exception: @@ -2299,14 +2299,14 @@ def get_file_record_length(file): return max_line_length -def dump_data_set_member_to_file(data_set_member, is_binary): +def dump_data_set_member_to_file(data_set_member, binary): """Dumps a data set member into a file in USS. Parameters ---------- data_set_member : str Name of the data set member to dump. - is_binary : bool + binary : bool Whether the data set member contains binary data. Returns @@ -2323,7 +2323,7 @@ def dump_data_set_member_to_file(data_set_member, is_binary): os.close(fd) copy_args = dict() - if is_binary: + if binary: copy_args["options"] = "-B" response = datasets.copy(data_set_member, temp_path, **copy_args) @@ -2336,7 +2336,7 @@ def dump_data_set_member_to_file(data_set_member, is_binary): def get_data_set_attributes( name, size, - is_binary, + binary, asa_text=False, record_format=None, record_length=None, @@ -2363,7 +2363,7 @@ def get_data_set_attributes( Name of the new sequential data set. size : int Number of bytes needed for the new data set. - is_binary : bool + binary : bool Whether or not the data set will have binary data. asa_text : bool Whether the data set will have ASA control characters. 
@@ -2388,14 +2388,14 @@ def get_data_set_attributes( # set default value - record_format if record_format is None: - if is_binary: + if binary: record_format = "FB" else: record_format = "VB" # set default value - record_length if record_length is None: - if is_binary: + if binary: record_length = 80 else: record_length = 1028 @@ -2433,8 +2433,8 @@ def get_data_set_attributes( def create_seq_dataset_from_file( file, dest, - force, - is_binary, + replace, + binary, asa_text, record_length=None, volume=None, @@ -2449,9 +2449,9 @@ def create_seq_dataset_from_file( Path of the source file. dest : str Name of the data set. - force : bool + replace : bool Whether to replace an existing data set. - is_binary : bool + binary : bool Whether the file has binary data. asa_text bool Whether the file has ASA control characters. @@ -2472,7 +2472,7 @@ def create_seq_dataset_from_file( # When src is a binary file, the module will use default attributes # for the data set, such as a record format of "VB". - if not is_binary: + if not binary: record_format = "FB" if not record_length: record_length = get_file_record_length(file) @@ -2486,14 +2486,14 @@ def create_seq_dataset_from_file( dest_params = get_data_set_attributes( name=dest, size=src_size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, record_format=record_format, record_length=record_length, volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) def backup_data(ds_name, ds_type, backup_name, tmphlq=None): @@ -2706,7 +2706,7 @@ def does_destination_allow_copy( member_exists, dest_type, is_uss, - force, + replace, volume=None, tmphlq=None ): @@ -2729,7 +2729,7 @@ def does_destination_allow_copy( Type of the destination (SEQ/PARTITIONED/VSAM/USS). is_uss : bool Whether or not the destination is inside USS. 
- force : bool + replace : bool Whether or not the module can replace existing destinations. volume : str, optional Volume where the destination should be. @@ -2745,21 +2745,21 @@ def does_destination_allow_copy( # If the destination is inside USS and the module doesn't have permission to replace it, # it fails. if is_uss and dest_exists: - if src_type == "USS" and os.path.isdir(dest) and os.path.isdir(src) and not force: + if src_type == "USS" and os.path.isdir(dest) and os.path.isdir(src) and not replace: return False - elif os.path.isfile(dest) and not force: + elif os.path.isfile(dest) and not replace: return False # If the destination is a sequential or VSAM data set and is empty, the module will try to use it, # otherwise, force needs to be True to continue and replace it. if (dest_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_VSAM) and dest_exists: is_dest_empty = data_set.DataSet.is_empty(dest, volume, tmphlq=tmphlq) - if not (is_dest_empty or force): + if not (is_dest_empty or replace): return False # When the destination is a partitioned data set, the module will have to be able to replace # existing members inside of it, if needed. - if dest_type in data_set.DataSet.MVS_PARTITIONED and dest_exists and member_exists and not force: + if dest_type in data_set.DataSet.MVS_PARTITIONED and dest_exists and member_exists and not replace: return False # When the destination is an existing GDG, we'll check that we have enough free generations @@ -2865,7 +2865,7 @@ def get_attributes_of_any_dataset_created( src_ds_type, src, src_name, - is_binary, + binary, asa_text, volume=None ): @@ -2882,7 +2882,7 @@ def get_attributes_of_any_dataset_created( Name of the source data set, used as a model when appropiate. src_name : str Extraction of the source name without the member pattern. - is_binary : bool + binary : bool Whether the data set will contain binary data. asa_text : bool Whether the data set will contain ASA control characters. 
@@ -2902,7 +2902,7 @@ def get_attributes_of_any_dataset_created( params = get_data_set_attributes( dest, size=size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, volume=volume ) @@ -2911,7 +2911,7 @@ def get_attributes_of_any_dataset_created( params = get_data_set_attributes( dest, size=size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, volume=volume ) @@ -2921,7 +2921,7 @@ def get_attributes_of_any_dataset_created( params = get_data_set_attributes( dest, size=size, - is_binary=is_binary, + binary=binary, asa_text=asa_text, volume=volume ) @@ -2934,8 +2934,8 @@ def allocate_destination_data_set( src_ds_type, dest_ds_type, dest_exists, - force, - is_binary, + replace, + binary, executable, asa_text, is_gds, @@ -2960,9 +2960,9 @@ def allocate_destination_data_set( Type of the destination data set. dest_exists : bool Whether the destination data set already exists. - force : bool + replace : bool Whether to replace an existent data set. - is_binary : bool + binary : bool Whether the data set will contain binary data. executable : bool Whether the data to copy is an executable dataset or file. @@ -2991,7 +2991,8 @@ def allocate_destination_data_set( src_name = data_set.extract_dsname(src) is_dest_empty = data_set.DataSet.is_empty(dest) if dest_exists else True - # Replacing an existing dataset only when it's not empty. We don't know whether that + # Replace in datasets. + # Reuse empty datasets when replace is not true. We don't know whether that # empty dataset was created for the user by an admin/operator, and they don't have permissions # to create new datasets. # These rules assume that source and destination types are compatible. @@ -3001,6 +3002,8 @@ def allocate_destination_data_set( if dest_exists and (is_dest_empty or dest_ds_type == "GDG"): return False, dest_params, dest + if dest_exists and is_dest_empty and not replace: + return False, dest_params, dest # Giving more priority to the parameters given by the user. 
# Cover case the user set executable to true to create dataset valid. if dest_data_set: @@ -3032,14 +3035,14 @@ def allocate_destination_data_set( del dest_params["purge"] del dest_params["extended"] del dest_params["fifo"] - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_SEQ: volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume, tmphlq=tmphlq) + create_seq_dataset_from_file(src, dest, replace, binary, asa_text, volume=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: @@ -3053,12 +3056,12 @@ def allocate_destination_data_set( # size for the new data set. src_attributes = datasets.list_datasets(src_name)[0] record_length = int(src_attributes.record_length) - temp_dump = dump_data_set_member_to_file(src, is_binary) + temp_dump = dump_data_set_member_to_file(src, binary) create_seq_dataset_from_file( temp_dump, dest, - force, - is_binary, + replace, + binary, asa_text, record_length=record_length, volume=volume, @@ -3084,14 +3087,14 @@ def allocate_destination_data_set( dest_params = get_data_set_attributes( dest, size, - is_binary, + binary, asa_text, record_format=record_format, record_length=record_length, type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. 
@@ -3099,7 +3102,7 @@ def allocate_destination_data_set( record_format = record_length = None type_ds = "PDSE" - if is_binary: + if binary: record_format = "FB" record_length = 80 else: @@ -3119,7 +3122,7 @@ def allocate_destination_data_set( dest_params = get_data_set_attributes( dest, size, - is_binary, + binary, asa_text, record_format=record_format, record_length=record_length, @@ -3133,7 +3136,7 @@ def allocate_destination_data_set( if executable: dest_params = get_data_set_attributes( - dest, size, is_binary, + dest, size, binary, record_format='U', record_length=0, type="LIBRARY", @@ -3143,13 +3146,13 @@ def allocate_destination_data_set( dest_params = get_data_set_attributes( dest, size, - is_binary, + binary, asa_text, type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=replace, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: # If dest_data_set is not available, always create the destination using the src VSAM # as a model. @@ -3184,7 +3187,7 @@ def allocate_destination_data_set( src_ds_type, src, src_name, - is_binary, + binary, asa_text, volume ) @@ -3285,15 +3288,18 @@ def update_result(res_args, original_args): src = res_args.get("src") note = res_args.get("note") backup_name = res_args.get("backup_name") + dest_created = res_args.get("dest_created") dest_data_set_attrs = res_args.get("dest_data_set_attrs") + updated_result = dict( dest=res_args.get("dest"), - is_binary=original_args.get("is_binary"), changed=res_args.get("changed"), invocation=dict(module_args=original_args), + dest_created=dest_created, ) + if src: - updated_result["src"] = original_args.get("src") + updated_result["src"] = src if note: updated_result["note"] = note if backup_name: @@ -3367,7 +3373,7 @@ def run_module(module, arg_def): fail_json Cannot write a partitioned data set (PDS) to a USS file. 
fail_json - Destination already exists on the system, unable to overwrite unless force=True is specified. + Destination already exists on the system, unable to overwrite unless replace=True is specified. fail_json Unable to allocate destination data set. """ @@ -3400,7 +3406,7 @@ def run_module(module, arg_def): src = module.params.get('src') dest = module.params.get('dest') remote_src = module.params.get('remote_src') - is_binary = module.params.get('is_binary') + binary = module.params.get('binary') executable = module.params.get('executable') asa_text = module.params.get('asa_text') aliases = module.params.get('aliases') @@ -3413,8 +3419,8 @@ def run_module(module, arg_def): encoding = module.params.get('encoding') volume = module.params.get('volume') tmphlq = module.params.get('tmp_hlq') + replace = module.params.get('replace') force = module.params.get('force') - force_lock = module.params.get('force_lock') content = module.params.get('content') identical_gdg_copy = module.params.get('identical_gdg_copy', False) @@ -3526,7 +3532,7 @@ def run_module(module, arg_def): # When the destination is a dataset, we'll normalize the source # file to UTF-8 for the record length computation as Python # generally uses UTF-8 as the default encoding. - if not is_binary and not is_uss and not executable: + if not binary and not is_uss and not executable: new_src = src new_src = os.path.normpath(new_src) # Normalizing encoding when src is a USS file (only). @@ -3552,7 +3558,7 @@ def run_module(module, arg_def): ) # Creating the handler just for tagging, we're not copying yet! - copy_handler = CopyHandler(module, is_binary=is_binary) + copy_handler = CopyHandler(module, binary=binary) copy_handler._tag_file_encoding(converted_src, "UTF-8") else: if (is_src_gds and data_set.DataSet.data_set_exists(src, tmphlq=tmphlq)) or ( @@ -3702,7 +3708,7 @@ def run_module(module, arg_def): # for try to write in dest and if both src and dest are in lock. 
# ******************************************************************** if dest_exists and dest_ds_type != "USS": - if not force_lock: + if not force: is_dest_lock = data_set.DataSetUtils.verify_dataset_disposition(data_set=data_set.extract_dsname(dest_name), disposition="old") if is_dest_lock: module.fail_json( @@ -3800,12 +3806,12 @@ def run_module(module, arg_def): dest_member_exists, dest_ds_type, is_uss, - force, + replace, volume, tmphlq ): module.fail_json( - msg="{0} already exists on the system, unable to overwrite unless force=True is specified.".format(raw_dest), + msg="{0} already exists on the system, unable to overwrite unless replace=True is specified.".format(raw_dest), changed=False, dest=dest ) @@ -3825,8 +3831,8 @@ def run_module(module, arg_def): src_ds_type, dest_ds_type, dest_exists, - force, - is_binary, + replace, + binary, executable, asa_text, is_dest_gds, @@ -3835,6 +3841,10 @@ def run_module(module, arg_def): volume=volume, tmphlq=tmphlq ) + if res_args["changed"]: + res_args["dest_created"] = True + else: + res_args["dest_created"] = False except Exception as err: if converted_src: src = original_src @@ -3856,11 +3866,11 @@ def run_module(module, arg_def): # ******************************************************************** copy_handler = CopyHandler( module, - is_binary=is_binary, + binary=binary, executable=executable, asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock, + force=force, identical_gdg_copy=module.params.get('identical_gdg_copy', False), tmphlq=tmphlq ) @@ -3878,13 +3888,13 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- if is_uss: # Removing the carriage return characters - if src_ds_type == "USS" and not is_binary and not executable: + if src_ds_type == "USS" and not binary and not executable: new_src = conv_path or src if os.path.isfile(new_src): conv_path = copy_handler.remove_cr_endings(new_src) uss_copy_handler = USSCopyHandler( module, - 
is_binary=is_binary, + binary=binary, executable=executable, asa_text=asa_text, aliases=aliases, @@ -3895,7 +3905,10 @@ def run_module(module, arg_def): original_checksum = None if dest_exists: + res_args["dest_created"] = False original_checksum = get_file_checksum(dest) + else: + res_args["dest_created"] = True dest = uss_copy_handler.copy_to_uss( src, @@ -3904,7 +3917,7 @@ def run_module(module, arg_def): src_ds_type, src_member, member_name, - force, + replace, bool(content) ) res_args['size'] = os.stat(dest).st_size @@ -3932,7 +3945,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- elif dest_ds_type in data_set.DataSet.MVS_SEQ: # TODO: check how ASA behaves with this - if src_ds_type == "USS" and not is_binary: + if src_ds_type == "USS" and not binary: new_src = conv_path or src conv_path = normalize_line_endings(new_src, encoding) @@ -3952,12 +3965,12 @@ def run_module(module, arg_def): pdse_copy_handler = PDSECopyHandler( module, - is_binary=is_binary, + binary=binary, executable=executable, asa_text=asa_text, aliases=aliases, backup_name=backup_name, - force_lock=force_lock, + force=force, tmphlq=tmphlq ) @@ -4015,7 +4028,7 @@ def main(): argument_spec=dict( src=dict(type='str'), dest=dict(required=True, type='str'), - is_binary=dict(type='bool', default=False), + binary=dict(type='bool', default=False), executable=dict(type='bool', default=False), asa_text=dict(type='bool', default=False), aliases=dict(type='bool', default=False, required=False), @@ -4108,8 +4121,8 @@ def main(): autoescape=dict(type='bool', default=True), ) ), + replace=dict(type='bool', default=False), force=dict(type='bool', default=False), - force_lock=dict(type='bool', default=False), mode=dict(type='str', required=False), owner=dict(type='str', required=False), group=dict(type='str', required=False), @@ -4120,19 +4133,21 @@ def main(): arg_def = dict( src=dict(arg_type='data_set_or_path', required=False), 
dest=dict(arg_type='data_set_or_path', required=True), - is_binary=dict(arg_type='bool', required=False, default=False), + binary=dict(arg_type='bool', required=False, default=False), executable=dict(arg_type='bool', required=False, default=False), asa_text=dict(arg_type='bool', required=False, default=False), aliases=dict(arg_type='bool', required=False, default=False), + identical_gdg_copy=dict(type='bool', default=False), content=dict(arg_type='str', required=False), backup=dict(arg_type='bool', default=False, required=False), backup_name=dict(arg_type='data_set_or_path', required=False), local_follow=dict(arg_type='bool', default=True, required=False), remote_src=dict(arg_type='bool', default=False, required=False), - checksum=dict(arg_type='str', required=False), + ignore_sftp_stderr=dict(type='bool', default=True), validate=dict(arg_type='bool', required=False), volume=dict(arg_type='str', required=False), - force_lock=dict(type='bool', default=False), + replace=dict(type='bool', default=False), + force=dict(type='bool', default=False), dest_data_set=dict( arg_type='dict', @@ -4187,7 +4202,7 @@ def main(): if ( not module.params.get("encoding").get("to") and not module.params.get("remote_src") - and not module.params.get("is_binary") + and not module.params.get("binary") and not module.params.get("executable") ): module.params["encoding"]["to"] = encode.Defaults.get_default_system_charset() @@ -4225,7 +4240,6 @@ def main(): shutil.rmtree(path) elif os.path.exists(default_path): shutil.rmtree(default_path) - res_args = update_result(res_args=res_args, original_args=module.params) module.exit_json(**res_args) except CopyOperationError as err: diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index ffd0f7ab62..d174bdbd0d 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -304,6 +304,25 @@ returned: changed and if backup=yes type: str sample: /path/file_name.2020-04-23-08-32-29-bak.tar +encoding: + 
description: + - Specifies which encodings the destination file or data set was + converted from and to. + type: dict + returned: always + contains: + from: + description: + - The character set of the source I(src). + type: str + sample: IBM-1047 + returned: always + to: + description: + - The destination I(dest) character set for the output that was written as. + type: str + sample: ISO8859-1 + returned: always """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( ZOAUImportError, @@ -536,10 +555,8 @@ def run_module(): dest_data_set = None convert_rc = False changed = False - - result = dict(changed=changed, src=src, dest=dest) - if backup: - result["backup_name"] = None + encoding_dict = {"from": from_encoding, "to": to_encoding} + result = dict(changed=changed, src=src, dest=dest, encoding=encoding_dict, backup_name=None) try: # Check the src is a USS file/path or an MVS data set @@ -701,9 +718,7 @@ def run_module(): eu.uss_tag_encoding(new_dest, to_encoding) changed = True - result = dict(changed=changed, src=new_src, dest=new_dest, backup_name=backup_name) - else: - result = dict(src=new_src, dest=new_dest, changed=changed, backup_name=backup_name) + result.update(dict(src=new_src, dest=new_dest, changed=changed, backup_name=backup_name)) except encode.TaggingError as e: module.fail_json( msg=e.msg, diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 62004698aa..e0cf45186a 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -77,7 +77,7 @@ required: false default: "false" type: bool - is_binary: + binary: description: - Specifies if the file being fetched is a binary. required: false @@ -190,7 +190,7 @@ src: SOME.PDS.DATASET dest: /tmp/ flat: true - is_binary: true + binary: true - name: Fetch a UNIX file and don't validate its checksum zos_fetch: @@ -240,8 +240,10 @@ """ RETURN = r""" -file: - description: The source file path or data set on the remote machine. 
+src: + description: + - The source file path or data set on the remote machine. + - If the source is not found, then src will be empty. returned: success type: str sample: SOME.DATA.SET @@ -250,7 +252,7 @@ returned: success type: str sample: /tmp/SOME.DATA.SET -is_binary: +binary: description: Indicates the transfer mode that was used to fetch. returned: success type: bool @@ -266,14 +268,9 @@ returned: success type: str sample: PDSE -note: - description: Notice of module failure when C(fail_on_missing) is false. - returned: failure and fail_on_missing=false - type: str - sample: The data set USER.PROCLIB does not exist. No data was fetched. msg: - description: Message returned on failure. - returned: failure + description: Any important messages from the module. + returned: always type: str sample: The source 'TEST.DATA.SET' does not exist or is uncataloged. stdout: @@ -534,7 +531,7 @@ def _copy_vsam_to_temp_data_set(self, ds_name): return out_ds_name - def _fetch_uss_file(self, src, is_binary, encoding=None): + def _fetch_uss_file(self, src, binary, encoding=None): """Convert encoding of a USS file. Return a tuple of temporary file name containing converted data. @@ -542,7 +539,7 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): ---------- src : str Source of the file. - is_binary : bool + binary : bool If is binary. encoding : str The file encoding. @@ -558,7 +555,7 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): Any exception ocurred while converting encoding. """ file_path = None - if (not is_binary) and encoding: + if (not binary) and encoding: fd, file_path = tempfile.mkstemp() from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -582,7 +579,7 @@ def _fetch_uss_file(self, src, is_binary, encoding=None): return file_path if file_path else src - def _fetch_vsam(self, src, is_binary, encoding=None): + def _fetch_vsam(self, src, binary, encoding=None): """Copy the contents of a VSAM to a sequential data set. 
Afterwards, copy that data set to a USS file. @@ -590,7 +587,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): ---------- src : str Source of the file. - is_binary : bool + binary : bool If is binary. encoding : str The file encoding. @@ -606,7 +603,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): Unable to delete temporary dataset. """ temp_ds = self._copy_vsam_to_temp_data_set(src) - file_path = self._fetch_mvs_data(temp_ds, is_binary, encoding=encoding) + file_path = self._fetch_mvs_data(temp_ds, binary, encoding=encoding) rc = datasets.delete(temp_ds) if rc != 0: os.remove(file_path) @@ -616,7 +613,7 @@ def _fetch_vsam(self, src, is_binary, encoding=None): return file_path - def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): + def _fetch_pdse(self, src, binary, temp_dir=None, encoding=None): """Copy a partitioned data set to a USS directory. If the data set is not being fetched in binary mode, encoding for all members inside the data set will be converted. @@ -625,7 +622,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): ---------- src : str Source of the dataset. - is_binary : bool + binary : bool If it is binary. temp_dir : str Parent directory for the temp directory of the copy. 
@@ -650,7 +647,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): "options": "" } - if is_binary: + if binary: copy_args["options"] = "-B" try: @@ -670,7 +667,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): stderr_lines=copy_exception.response.stderr_response.splitlines(), ) - if (not is_binary) and encoding: + if (not binary) and encoding: enc_utils = encode.EncodeUtils() from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -693,7 +690,7 @@ def _fetch_pdse(self, src, is_binary, temp_dir=None, encoding=None): ) return dir_path - def _fetch_gdg(self, src, is_binary, encoding=None): + def _fetch_gdg(self, src, binary, encoding=None): """Copy a generation data group to a USS directory. If the data set is not being fetched in binary mode, encoding for all data sets inside the GDG will be converted. @@ -702,7 +699,7 @@ def _fetch_gdg(self, src, is_binary, encoding=None): ---------- src : str Source of the generation data group. - is_binary : bool + binary : bool If it is binary. encoding : str The file encoding. @@ -726,7 +723,7 @@ def _fetch_gdg(self, src, is_binary, encoding=None): if current_gds.organization in data_set.DataSet.MVS_SEQ: self._fetch_mvs_data( current_gds.name, - is_binary, + binary, temp_dir=dir_path, file_override=current_gds.name, encoding=encoding @@ -734,14 +731,14 @@ def _fetch_gdg(self, src, is_binary, encoding=None): elif current_gds.organization in data_set.DataSet.MVS_PARTITIONED: self._fetch_pdse( current_gds.name, - is_binary, + binary, temp_dir=dir_path, encoding=encoding ) return dir_path - def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, encoding=None): + def _fetch_mvs_data(self, src, binary, temp_dir=None, file_override=None, encoding=None): """Copy a sequential data set or a partitioned data set member to a USS file. 
@@ -749,7 +746,7 @@ def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, enc ---------- src : str Source of the dataset. - is_binary : bool + binary : bool If it is binary. temp_dir : str Parent directory for the temp directory of the copy. @@ -784,7 +781,7 @@ def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, enc "options": "" } - if is_binary: + if binary: copy_args["options"] = "-B" try: @@ -801,7 +798,7 @@ def _fetch_mvs_data(self, src, is_binary, temp_dir=None, file_override=None, enc stderr_lines=copy_exception.response.stderr_response.splitlines(), ) - if (not is_binary) and encoding: + if (not binary) and encoding: enc_utils = encode.EncodeUtils() from_code_set = encoding.get("from") to_code_set = encoding.get("to") @@ -849,7 +846,7 @@ def run_module(): dest=dict(required=True, type="path"), fail_on_missing=dict(required=False, default=True, type="bool"), flat=dict(required=False, default=False, type="bool"), - is_binary=dict(required=False, default=False, type="bool"), + binary=dict(required=False, default=False, type="bool"), use_qualifier=dict(required=False, default=False, type="bool"), validate_checksum=dict(required=False, default=True, type="bool"), encoding=dict(required=False, type="dict"), @@ -872,12 +869,12 @@ def run_module(): src=dict(arg_type="data_set_or_path", required=True), dest=dict(arg_type="path", required=True), fail_on_missing=dict(arg_type="bool", required=False, default=True), - is_binary=dict(arg_type="bool", required=False, default=False), + binary=dict(arg_type="bool", required=False, default=False), use_qualifier=dict(arg_type="bool", required=False, default=False), tmp_hlq=dict(type='qualifier_or_empty', required=False, default=None), ) - if not module.params.get("encoding").get("from") and not module.params.get("is_binary"): + if not module.params.get("encoding").get("from") and not module.params.get("binary"): mvs_src = data_set.is_data_set(src) remote_charset = 
encode.Defaults.get_default_system_charset() @@ -914,15 +911,30 @@ def run_module(): src = parsed_args.get("src") b_src = to_bytes(src) fail_on_missing = boolean(parsed_args.get("fail_on_missing")) - is_binary = boolean(parsed_args.get("is_binary")) + binary = boolean(parsed_args.get("binary")) encoding = module.params.get("encoding") tmphlq = module.params.get("tmp_hlq") # ********************************************************** # # Check for data set existence and determine its type # # ********************************************************** # - - res_args = dict() + encoding_dict = {"from": encoding.get("from"), "to": encoding.get("to")} + result = dict( + src=src, + dest="", + binary=binary, + checksum="", + changed=False, + data_set_type="", + remote_path="", + msg="", + stdout="", + stderr="", + stdout_lines=[], + stderr_lines=[], + rc=0, + encoding=encoding_dict, + ) src_data_set = None ds_type = None @@ -963,7 +975,7 @@ def run_module(): ) else: module.exit_json( - note=("Source '{0}' was not found. No data was fetched.".format(src)) + msg=("Source '{0}' was not found. 
No data was fetched.".format(src)) ) if "/" in src: @@ -989,10 +1001,10 @@ def run_module(): if ds_type in data_set.DataSet.MVS_SEQ: file_path = fetch_handler._fetch_mvs_data( src_data_set.name, - is_binary, + binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path # ********************************************************** # # Fetch a partitioned data set or one of its members # @@ -1002,14 +1014,14 @@ def run_module(): if is_member: file_path = fetch_handler._fetch_mvs_data( src_data_set.name, - is_binary, + binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path else: - res_args["remote_path"] = fetch_handler._fetch_pdse( + result["remote_path"] = fetch_handler._fetch_pdse( src_data_set.name, - is_binary, + binary, encoding=encoding ) @@ -1024,10 +1036,10 @@ def run_module(): ) file_path = fetch_handler._fetch_uss_file( src, - is_binary, + binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path # ********************************************************** # # Fetch a VSAM data set # @@ -1036,35 +1048,35 @@ def run_module(): elif ds_type in data_set.DataSet.MVS_VSAM: file_path = fetch_handler._fetch_vsam( src_data_set.name, - is_binary, + binary, encoding=encoding ) - res_args["remote_path"] = file_path + result["remote_path"] = file_path # ********************************************************** # # Fetch a GDG # # ********************************************************** # elif ds_type == "GDG": - res_args["remote_path"] = fetch_handler._fetch_gdg( + result["remote_path"] = fetch_handler._fetch_gdg( src_data_set.name, - is_binary, + binary, encoding=encoding ) if ds_type == "USS": - res_args["file"] = src + result["src"] = src else: - res_args["file"] = src_data_set.name + result["src"] = src_data_set.name # Removing the HLQ since the user is probably not expecting it. 
The module # hasn't returned it ever since it was originally written. Changes made to # add GDG/GDS support started leaving the HLQ behind in the file name. if hlq: - res_args["file"] = res_args["file"].replace(f"{hlq}.", "") + result["src"] = result["src"].replace(f"{hlq}.", "") - res_args["ds_type"] = ds_type - module.exit_json(**res_args) + result["ds_type"] = ds_type + module.exit_json(**result) class ZOSFetchError(Exception): @@ -1094,7 +1106,7 @@ def __init__(self, msg, rc="", stdout="", stderr="", stdout_lines="", stderr_lin stdout_lines=stdout_lines, stderr_lines=stderr_lines, ) - super().__init__(self.msg) + super().__init__(msg) def main(): diff --git a/plugins/modules/zos_find.py b/plugins/modules/zos_find.py index 68319b9171..a25785c450 100644 --- a/plugins/modules/zos_find.py +++ b/plugins/modules/zos_find.py @@ -66,6 +66,8 @@ Multiple patterns can be specified using a list. - The pattern can be a regular expression. - If the pattern is a regular expression, it must match the full data set name. + - To exclude members, the regular expression or pattern must be enclosed in parentheses. + This expression can be used alongside a pattern to exclude data set names. aliases: - exclude type: list @@ -78,7 +80,6 @@ names that match at least one of the patterns specified. Multiple patterns can be specified using a list. - This parameter expects a list, which can be either comma separated or YAML. - - If C(pds_patterns) is provided, C(patterns) must be member patterns. - When searching for members within a PDS/PDSE, pattern can be a regular expression. type: list elements: str @@ -92,17 +93,6 @@ - Filtering by size is currently only valid for sequential and partitioned data sets. required: false type: str - pds_patterns: - description: - - List of PDS/PDSE to search. Wildcard is possible. - - Required when searching for data set members. - - Valid only for C(nonvsam) resource types. Otherwise ignored. 
- aliases: - - pds_paths - - pds_pattern - type: list - elements: str - required: false resource_type: description: - The types of resources to search. @@ -218,6 +208,22 @@ EXAMPLES = r""" +- name: Exclude all members starting with characters 'TE' in a given list datasets patterns + zos_find: + excludes: '(^te.*)' + patterns: + - IMSTEST.TEST.* + - IMSTEST.USER.* + - USER.*.LIB + +- name: Exclude datasets that includes 'DATA' and members starting with characters 'MEM' in a given list datasets patterns + zos_find: + excludes: '^.*DATA.*(^MEM.*)' + patterns: + - IMSTEST.*.TEST + - IMSTEST.*.* + - USER.*.LIB + - name: Find all data sets with HLQ 'IMS.LIB' or 'IMSTEST.LIB' that contain the word 'hello' zos_find: patterns: @@ -238,14 +244,6 @@ contains: 'hello' excludes: '.*TEST' -- name: Find all members starting with characters 'TE' in a given list of PDS patterns - zos_find: - patterns: '^te.*' - pds_patterns: - - IMSTEST.TEST.* - - IMSTEST.USER.* - - USER.*.LIB - - name: Find all data sets greater than 2MB and allocated in one of the specified volumes zos_find: patterns: 'USER.*' @@ -344,7 +342,6 @@ import math import json -from copy import deepcopy from re import match as fullmatch @@ -418,7 +415,7 @@ def content_filter(module, patterns, content): return filtered_data_sets -def data_set_filter(module, pds_paths, patterns): +def data_set_filter(module, patterns): """ Find data sets that match any pattern in a list of patterns. Parameters @@ -442,7 +439,6 @@ def data_set_filter(module, pds_paths, patterns): Non-zero return code received while executing ZOAU shell command 'dls'. 
""" filtered_data_sets = dict(ps=set(), pds=dict(), searched=0) - patterns = pds_paths or patterns for pattern in patterns: rc, out, err = _dls_wrapper(pattern, list_details=True) if rc != 0: @@ -462,66 +458,40 @@ def data_set_filter(module, pds_paths, patterns): result = line.split() if result: if result[1] == "PO": - if pds_paths: - mls_rc, mls_out, mls_err = module.run_command( - "mls '{0}(*)'".format(result[0]), errors='replace' - ) - if mls_rc == 2: - filtered_data_sets["pds"][result[0]] = {} - else: - filtered_data_sets["pds"][result[0]] = \ - set(filter(None, mls_out.splitlines())) - else: + mls_rc, mls_out, mls_err = module.run_command( + f"mls '{result[0]}(*)'", errors='replace' + ) + if mls_rc == 2: filtered_data_sets["pds"][result[0]] = {} + else: + filtered_data_sets["pds"][result[0]] = \ + set(filter(None, mls_out.splitlines())) else: filtered_data_sets["ps"].add(result[0]) return filtered_data_sets -def pds_filter(module, pds_dict, member_patterns, excludes=None): +def filter_members(module, members, excludes): """ Return all PDS/PDSE data sets whose members match any of the patterns in the given list of member patterns. - Parameters ---------- module : AnsibleModule The Ansible module object being used in the module. - pds_dict : dict[str, str] - A dictionary where each key is the name of - of the PDS/PDSE and the value is a list of - members belonging to the PDS/PDSE. - member_patterns : list - A list of member patterns to search for. - + member : set + A list of member patterns to on it. + excludes : list + The str value to filter members. Returns ------- dict[str, set[str]] Filtered PDS/PDSE with corresponding members. 
""" - filtered_pds = dict() - for pds, members in pds_dict.items(): - for m in members: - for mem_pat in member_patterns: - if _match_regex(module, mem_pat, m): - try: - filtered_pds[pds].add(m) - except KeyError: - filtered_pds[pds] = set({m}) - # ************************************************************************ - # Exclude any member that matches a given pattern in 'excludes'. - # Changes will be made to 'filtered_pds' each iteration. Therefore, - # iteration should be performed over a copy of 'filtered_pds'. Because - # Python performs a shallow copy when copying a dictionary, a deep copy - # should be performed. - # ************************************************************************ - if excludes: - for pds, members in deepcopy(filtered_pds).items(): - for m in members: - for ex_pat in excludes: - if _match_regex(module, ex_pat, m): - filtered_pds[pds].remove(m) - break - return filtered_pds + filtered_members = { + member for member in members + if not any(_match_regex(module, exclude, member) for exclude in excludes) + } + return filtered_members def vsam_filter(module, patterns, vsam_types, age=None, excludes=None): @@ -817,7 +787,6 @@ def migrated_nonvsam_filter(module, data_sets, excludes): # Fetch only active datasets init_filtered_data_sets = data_set_filter( module, - None, [ds] ) active_datasets = \ @@ -906,6 +875,37 @@ def exclude_data_sets(module, data_set_list, excludes): return data_set_list +def get_members_to_exclude(excludes): + """Get from the excludes str any subject that is isndie () to get members to exclude + + Args + ---- + excludes : str + String of exlucions of the find operation including members + + Returns + ------- + members_to_exclude [list] + The patters of members to be exlude from the list + datasets_to_exclude [list] + The patters of datasets to be exlude from the list + """ + members_to_exclude = [] + datasets_to_exclude = [] + for exclude in excludes: + match = re.search(r'\(([^)]+)\)', exclude) + if match: + 
members = match.group(1) + datasets = exclude[:match.start()] + exclude[match.end():] + members_to_exclude.append(members) + if datasets: + datasets_to_exclude.append(datasets) + else: + if exclude: + datasets_to_exclude.append(exclude) + return members_to_exclude, datasets_to_exclude + + def _age_filter(ds_date, now, age): """Determine whether a given date is older than 'age'. @@ -1237,11 +1237,6 @@ def run_module(module): excludes = module.params.get('excludes') or module.params.get('exclude') patterns = module.params.get('patterns') size = module.params.get('size') - pds_paths = ( - module.params.get('pds_paths') - or module.params.get('pds_patterns') - or module.params.get('pds_pattern') - ) resource_type = module.params.get('resource_type') or module.params.get('resource_types') resource_type = [type.upper() for type in resource_type] volume = module.params.get('volume') or module.params.get('volumes') @@ -1260,6 +1255,9 @@ def run_module(module): filtered_migrated_types = set() vsam_migrated_types = set() + excludes_datasets = exclude_members = [] + if excludes: + exclude_members, excludes_datasets = get_members_to_exclude(excludes) for type in resource_type: if type in vsam_types: filtered_resource_types.add("VSAM") @@ -1294,7 +1292,7 @@ def run_module(module): for res_type in filtered_resource_types: examined = 0 filtered_data_sets = list() - init_filtered_data_sets = filtered_pds = dict() + init_filtered_data_sets = dict() if res_type == "MIGRATED": migrated_data_sets = list() for mtype in filtered_migrated_types: @@ -1307,25 +1305,18 @@ def run_module(module): if contains: init_filtered_data_sets = content_filter( module, - pds_paths if pds_paths else patterns, + patterns, contains ) else: init_filtered_data_sets = data_set_filter( module, - pds_paths, patterns ) - if pds_paths: - filtered_pds = pds_filter( - module, init_filtered_data_sets.get("pds"), patterns, excludes=excludes - ) - filtered_data_sets = list(filtered_pds.keys()) - else: - 
filtered_data_sets = \ - list(init_filtered_data_sets.get("ps").union(set(init_filtered_data_sets['pds'].keys()))) - if excludes: - filtered_data_sets = exclude_data_sets(module, filtered_data_sets, excludes) + filtered_data_sets = \ + list(init_filtered_data_sets.get("ps").union(set(init_filtered_data_sets['pds'].keys()))) + if len(excludes_datasets) > 0: + filtered_data_sets = exclude_data_sets(module, filtered_data_sets, excludes_datasets) # Filter data sets by age or size if size or age: filtered_data_sets = data_set_attribute_filter( @@ -1339,13 +1330,14 @@ def run_module(module): filtered_data_sets, examined = vsam_filter(module, patterns, vsam_resource_types, age=age, excludes=excludes) elif res_type == "GDG": filtered_data_sets = gdg_filter(module, patterns, limit, empty, fifo, purge, scratch, extended, excludes) - if filtered_data_sets: for ds in filtered_data_sets: if ds: if res_type == "NONVSAM": - members = filtered_pds.get(ds) or init_filtered_data_sets['pds'].get(ds) + members = init_filtered_data_sets['pds'].get(ds) if members: + if len(exclude_members) > 0: + members = filter_members(module, members, exclude_members) res_args['data_sets'].append( dict(name=ds, members=members, type=res_type) ) @@ -1389,12 +1381,6 @@ def main(): required=True ), size=dict(type="str", required=False), - pds_patterns=dict( - type="list", - elements="str", - required=False, - aliases=["pds_pattern", "pds_paths"] - ), resource_type=dict( type="list", elements="str", @@ -1438,11 +1424,6 @@ def main(): excludes=dict(arg_type="list", required=False, aliases=["exclude"]), patterns=dict(arg_type="list", required=True), size=dict(arg_type="str", required=False), - pds_patterns=dict( - arg_type="list", - required=False, - aliases=["pds_pattern", "pds_paths"] - ), resource_type=dict( arg_type="list", required=False, diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index b2a7cdcaa4..1a946d1d55 100644 --- a/plugins/modules/zos_job_output.py +++ 
b/plugins/modules/zos_job_output.py @@ -31,7 +31,7 @@ such as "TCP*" or "*". - The owner can be specific such as "IBMUSER", or one that uses a pattern like "*". - - If there is no ddname, or if ddname="?", output of all the ddnames under + - If there is no dd_name, or if dd_name="?", output of all the dds under the given job will be displayed. - If SYSIN DDs are needed, I(sysin_dd) should be set to C(true). version_added: "1.0.0" @@ -56,7 +56,7 @@ - The owner who ran the job. (e.g "IBMUSER", "*") type: str required: false - ddname: + dd_name: description: - Data definition name (show only this DD on a found job). (e.g "JESJCL", "?") @@ -82,16 +82,16 @@ """ EXAMPLES = r""" -- name: Job output with ddname +- name: Job output with dd_name zos_job_output: job_id: "STC02560" - ddname: "JESMSGLG" + dd_name: "JESMSGLG" -- name: JES Job output without ddname +- name: JES Job output without dd_name zos_job_output: job_id: "STC02560" -- name: JES Job output with all ddnames +- name: JES Job output with all dd_name zos_job_output: job_id: "STC*" job_name: "*" @@ -151,7 +151,7 @@ sample: "STL1" class: description: - Identifies the data set used in a system output data set, usually called a sysout data set. + Identifies the data set used in a system output data set, usually called a sysout data set. type: str sample: content_type: @@ -181,15 +181,15 @@ it represents the time elapsed from the job execution start and current time. type: str sample: 00:00:10 - ddnames: + dds: description: - Data definition names. + Data definition names. type: list elements: dict contains: - ddname: + dd_name: description: - Data definition name. + Data definition name. type: str sample: JESMSGLG record_count: @@ -199,7 +199,7 @@ sample: 17 id: description: - The file ID. + The file ID. type: str sample: 2 stepname: @@ -210,8 +210,8 @@ sample: JES2 procstep: description: - Identifies the set of statements inside JCL grouped together to - perform a particular function. 
+ Identifies the set of statements inside JCL grouped together to + perform a particular function. type: str sample: PROC1 byte_count: @@ -221,7 +221,7 @@ sample: 574 content: description: - The ddname content. + The dd content. type: list elements: str sample: @@ -239,7 +239,7 @@ " 5 //SYSUT1 DD * ", " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " - ] + ] job_class: description: Job class for this job. @@ -273,7 +273,7 @@ sample: "IEBGENER" ret_code: description: - Return code output collected from job log. + Return code output collected from job log. type: dict contains: msg: @@ -289,48 +289,48 @@ sample: S0C4 msg_txt: description: - Returns additional information related to the job. + Returns additional information related to the job. type: str sample: "No job can be located with this job name: HELLO" code: description: - Return code converted to integer value (when possible). + Return code converted to integer value (when possible). type: int sample: 00 - steps: - description: - Series of JCL steps that were executed and their return codes. - type: list - elements: dict - contains: - step_name: - description: - Name of the step shown as "was executed" in the DD section. - type: str - sample: "STEP0001" - step_cc: - description: - The CC returned for this step in the DD section. - type: int - sample: 0 sample: ret_code: { "code": 0, "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - } - ] } + steps: + description: + Series of JCL steps that were executed and their return codes. + type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + sample: "STEP0001" + step_cc: + description: + The CC returned for this step in the DD section. 
+ type: int + sample: 0 + sample: [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ] sample: [ { "class": "R", "content_type": "JOB", - "ddnames": [ + "dds": [ { "byte_count": "775", "content": [ @@ -352,7 +352,7 @@ "- 6 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "17", @@ -376,7 +376,7 @@ " 6 //SYSUT2 DD SYSOUT=* ", " 7 // " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "14", @@ -405,7 +405,7 @@ " IEF033I JOB/HELLO /STOP 2020049.1025 ", " CPU: 0 HR 00 MIN 00.00 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "19", @@ -419,7 +419,7 @@ " ", " PROCESSING ENDED AT EOD " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "", "record_count": "4", @@ -430,7 +430,7 @@ "content": [ " HELLO, WORLD " ], - "ddname": "SYSUT2", + "dd_name": "SYSUT2", "id": "103", "procstep": "", "record_count": "1", @@ -451,12 +451,12 @@ "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - } - ] }, + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ], "system": "STL1", "subsystem": "STL1", "cpu_time": 1414, @@ -468,7 +468,7 @@ description: Indicates if any changes were made during module operation type: bool - returned: on success + returned: always """ @@ -531,6 +531,9 @@ def run_module(): stderr=str(err) ) + results = {} + results["changed"] = False + job_id = module.params.get("job_id") job_name = module.params.get("job_name") owner = module.params.get("owner") @@ -538,7 +541,7 @@ def run_module(): sysin = module.params.get("sysin_dd") if not job_id and not job_name and not owner: - module.fail_json(msg="Please provide a job_id or job_name or owner") + module.fail_json(msg="Please provide a job_id or job_name or owner", stderr="", **results) try: results = {} @@ 
-546,13 +549,12 @@ def run_module(): results["changed"] = False except zoau_exceptions.JobFetchException as fetch_exception: module.fail_json( - msg="ZOAU exception", - rc=fetch_exception.response.rc, - stdout=fetch_exception.response.stdout_response, + msg=f"ZOAU exception {fetch_exception.response.stdout_response} rc {fetch_exception.response.rc}", stderr=fetch_exception.response.stderr_response, + changed=False ) except Exception as e: - module.fail_json(msg=repr(e)) + module.fail_json(msg=repr(e), **results) module.exit_json(**results) diff --git a/plugins/modules/zos_job_query.py b/plugins/modules/zos_job_query.py index a9d237dfc0..398768c3bf 100644 --- a/plugins/modules/zos_job_query.py +++ b/plugins/modules/zos_job_query.py @@ -108,6 +108,7 @@ True if the state was changed, otherwise False. returned: always type: bool + sample: True jobs: description: The output information for a list of jobs matching specified criteria. @@ -153,21 +154,21 @@ The job entry subsystem that MVS uses to do work. type: str sample: STL1 - cpu_time: + origin_node: description: - Sum of the CPU time used by each job step, in microseconds. - type: int - sample: 5 + Origin node that submitted the job. + type: str + sample: "STL1" execution_node: description: Execution node that picked the job and executed it. type: str sample: "STL1" - origin_node: + cpu_time: description: - Origin node that submitted the job. - type: str - sample: "STL1" + Sum of the CPU time used by each job step, in microseconds. + type: int + sample: 5 ret_code: description: Return code output collected from job log. @@ -210,19 +211,35 @@ The CC returned for this step in the DD section. type: int sample: 0 - sample: ret_code: { "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "code": 0, - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - } - ] + "code": 0 } + steps: + description: + Series of JCL steps that were executed and their return codes. 
+ type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + sample: "STEP0001" + step_cc: + description: + The CC returned for this step in the DD section. + type: int + sample: 0 + sample: + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ] job_class: description: Job class for this job. @@ -277,14 +294,20 @@ "job_name": "LINKJOB", "owner": "ADMIN", "job_id": "JOB01427", - "ret_code": "null", - "job_class": "K", "content_type": "JOB", - "svc_class": "?", + "ret_code": { "msg" : "CC", "msg_code" : "0000", "code" : "0", msg_txt : "CC" }, + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ], + "job_class": "STC", + "svc_class": "null", "priority": 1, "asid": 0, "creation_date": "2023-05-03", "creation_time": "12:13:00", + "program_name": "BPXBATCH", "queue_position": 3, "execution_time": "00:00:02", "system": "STL1", @@ -298,7 +321,8 @@ "owner": "ADMIN", "job_id": "JOB16577", "content_type": "JOB", - "ret_code": { "msg": "CANCELED", "code": "null" }, + "ret_code": { "msg" : "CANCELED", "msg_code" : "null", "code" : "null", msg_txt : "CANCELED" }, + "steps" : [], "job_class": "A", "svc_class": "E", "priority": 0, @@ -307,6 +331,7 @@ "creation_time": "12:14:00", "queue_position": 0, "execution_time": "00:00:03", + "program_name": "null", "system": "STL1", "subsystem": "STL1", "cpu_time": 1414, @@ -314,7 +339,7 @@ "origin_node": "STL1" }, ] -message: +msg: description: Message returned on failure. type: str @@ -349,7 +374,7 @@ def run_module(): job_id=dict(type="str", required=False), ) - result = dict(changed=False, message="") + result = dict(changed=False) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) @@ -379,6 +404,7 @@ def run_module(): jobs_raw = query_jobs(name, id, owner) if jobs_raw: jobs = parsing_jobs(jobs_raw) + result["changed"] = True else: jobs = None @@ -436,8 +462,8 @@ def parsing_jobs(jobs_raw): Parsed jobs. 
""" jobs = [] - ret_code = {} for job in jobs_raw: + ret_code = job.get("ret_code") # Easier to see than checking for an empty string, JOB NOT FOUND was # replaced with None in the jobs.py and msg_txt field describes the job query instead if job.get("ret_code") is None: @@ -449,30 +475,32 @@ def parsing_jobs(jobs_raw): if "AC" in status_raw: # the job is active - ret_code = None + ret_code["msg"] = None + ret_code["msg_code"] = None + ret_code["code"] = None + ret_code["msg_txt"] = None + elif "CC" in status_raw: # status = 'Completed normally' - ret_code = { - "msg": status_raw, - "code": job.get("ret_code").get("code"), - } + ret_code["msg"] = status_raw + elif "ABEND" in status_raw: # status = 'Ended abnormally' - ret_code = { - "msg": status_raw, - "code": job.get("ret_code").get("code"), - } + ret_code["msg"] = status_raw + elif "ABENDU" in status_raw: # status = 'Ended abnormally' - ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} + ret_code["msg"] = status_raw elif "CANCELED" in status_raw or "JCLERR" in status_raw or "JCL ERROR" in status_raw or "JOB NOT FOUND" in status_raw: # status = status_raw - ret_code = {"msg": status_raw, "code": None} + ret_code["msg"] = status_raw + ret_code["code"] = None + ret_code["msg_code"] = None else: # status = 'Unknown' - ret_code = {"msg": status_raw, "code": job.get("ret_code").get("code")} + ret_code["msg"] = status_raw job_dict = { "job_name": job.get("job_name"), @@ -485,6 +513,7 @@ def parsing_jobs(jobs_raw): "execution_node": job.get("execution_node"), "origin_node": job.get("origin_node"), "ret_code": ret_code, + "steps": job.get("steps"), "job_class": job.get("job_class"), "svc_class": job.get("svc_class"), "priority": job.get("priority"), diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 0917fc0079..88808d5013 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -46,35 +46,31 @@ - When using a generation data set, 
only already created generations are valid. If either the relative name is positive, or negative but not found, the module will fail. - location: + remote_src: required: false - default: data_set - type: str - choices: - - data_set - - uss - - local + default: true + type: bool description: - - The JCL location. Supported choices are C(data_set), C(uss) or C(local). - - C(data_set) can be a PDS, PDSE, sequential data set, or a generation data set. - - C(uss) means the JCL location is located in UNIX System Services (USS). - - C(local) means locally to the Ansible control node. - wait_time_s: + - If set to C(false), the module searches for C(src) in the controller node. + - If set to C(true), the module searches for the file C(src) in the managed node. + wait_time: required: false default: 10 type: int description: - - Option I(wait_time_s) is the total time that module + - Option I(wait_time) is the total time that module L(zos_job_submit,./zos_job_submit.html) will wait for a submitted job to complete. The time begins when the module is executed on the managed node. - - I(wait_time_s) is measured in seconds and must be a value greater than 0 + - I(wait_time) is measured in seconds and must be a value greater than 0 and less than 86400. - - The module can submit and forget jobs by setting I(wait_time_s) to 0. This way the + - The module can submit and forget jobs by setting I(wait_time) to 0. This way the module will not try to retrieve the job details other than job id. Job details and contents can be retrieved later by using L(zos_job_query,./zos_job_query.html) or L(zos_job_output,./zos_job_output.html) if needed. + - If I(remote_src=False) and I(wait_time=0), the module will not clean the copy + of the file on the remote system, to avoid problems with job submission. max_rc: required: false type: int @@ -86,7 +82,7 @@ type: bool description: - Whether to print the DD output. - - If false, an empty list will be returned in the ddnames field. 
+ - If false, an empty list will be returned in the dds field. volume: required: false type: str @@ -96,12 +92,12 @@ - When configured, the L(zos_job_submit,./zos_job_submit.html) will try to catalog the data set for the volume serial. If it is not able to, the module will fail. - - Ignored for I(location=uss) and I(location=local). + - Ignored for I(remote_src=False). encoding: description: - Specifies which encoding the local JCL file should be converted from and to, before submitting the job. - - This option is only supported for when I(location=local). + - This option is only supported for when I(remote_src=False). - If this parameter is not provided, and the z/OS systems default encoding can not be identified, the JCL file will be converted from UTF-8 to IBM-1047 by default, otherwise the module will detect the z/OS system @@ -194,13 +190,13 @@ description: Total duration time of the job execution, if it has finished. type: str sample: 00:00:10 - ddnames: + dds: description: Data definition names. type: list elements: dict contains: - ddname: + dd_name: description: Data definition name. type: str @@ -234,7 +230,7 @@ sample: 574 content: description: - The ddname content. + The dd content. type: list elements: str sample: @@ -299,34 +295,35 @@ is the case of a job that errors or is active. type: int sample: 0 - steps: - description: - Series of JCL steps that were executed and their return codes. - type: list - elements: dict - contains: - step_name: - description: - Name of the step shown as "was executed" in the DD section. - type: str - sample: "STEP0001" - step_cc: - description: - The CC returned for this step in the DD section. - type: int - sample: 0 sample: ret_code: { "code": 0, "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "STEP0001", - "step_cc": 0 - }, - ] } + steps: + description: + Series of JCL steps that were executed and their return codes. 
+ type: list + elements: dict + contains: + step_name: + description: + Name of the step shown as "was executed" in the DD section. + type: str + sample: "STEP0001" + step_cc: + description: + The CC returned for this step in the DD section. + type: int + sample: 0 + sample: + "steps": [ + { "step_name": "STEP0001", + "step_cc": 0 + } + ] job_class: description: Job class for this job. @@ -370,12 +367,12 @@ sample: "IEBGENER" system: description: - The job entry system that MVS uses to do work. + The job entry system that MVS uses to do work. type: str sample: STL1 subsystem: description: - The job entry subsystem that MVS uses to do work. + The job entry subsystem that MVS uses to do work. type: str sample: STL1 cpu_time: @@ -393,13 +390,11 @@ Origin node that submitted the job. type: str sample: "STL1" - sample: [ { - "class": "K", "content_type": "JOB", - "ddnames": [ + "dds": [ { "byte_count": "677", "content": [ @@ -420,7 +415,7 @@ "- 12 SYSOUT SPOOL KBYTES", "- 0.00 MINUTES EXECUTION TIME" ], - "ddname": "JESMSGLG", + "dd_name": "JESMSGLG", "id": "2", "procstep": "", "record_count": "16", @@ -477,7 +472,7 @@ " 15 ++SYSPRINT DD SYSOUT=* ", " ++* " ], - "ddname": "JESJCL", + "dd_name": "JESJCL", "id": "3", "procstep": "", "record_count": "47", @@ -531,7 +526,7 @@ " IEF033I JOB/DBDGEN00/STOP 2020073.1250 ", " CPU: 0 HR 00 MIN 00.03 SEC SRB: 0 HR 00 MIN 00.00 SEC " ], - "ddname": "JESYSMSG", + "dd_name": "JESYSMSG", "id": "4", "procstep": "", "record_count": "44", @@ -586,7 +581,7 @@ " **** END OF MESSAGE SUMMARY REPORT **** ", " " ], - "ddname": "SYSPRINT", + "dd_name": "SYSPRINT", "id": "102", "procstep": "L", "record_count": "45", @@ -595,18 +590,17 @@ ], "job_id": "JOB00361", "job_name": "DBDGEN00", - "owner": "OMVSADM", "ret_code": { "code": 0, "msg": "CC 0000", "msg_code": "0000", "msg_txt": "", - "steps": [ - { "step_name": "DLORD6", - "step_cc": 0 - } - ] }, + "steps": [ + { "step_name": "DLORD6", + "step_cc": 0 + } + ], "job_class": "K", 
"execution_time": "00:00:10", "svc_class": "?", @@ -629,19 +623,19 @@ - name: Submit JCL in a PDSE member. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true register: response - name: Submit JCL in USS with no DDs in the output. zos_job_submit: src: /u/tester/demo/sample.jcl - location: uss + remote_src: true return_output: false - name: Convert local JCL to IBM-037 and submit the job. zos_job_submit: src: /Users/maxy/ansible-playbooks/provision/sample.jcl - location: local + remote_src: false encoding: from: ISO8859-1 to: IBM-037 @@ -649,36 +643,36 @@ - name: Submit JCL in an uncataloged PDSE on volume P2SS01. zos_job_submit: src: HLQ.DATA.LLQ(SAMPLE) - location: data_set + remote_src: true volume: P2SS01 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit a long running PDS job and wait up to 30 seconds for completion. zos_job_submit: src: HLQ.DATA.LLQ(LONGRUN) - location: data_set - wait_time_s: 30 + remote_src: true + wait_time: 30 - name: Submit JCL and set the max return code the module should fail on to 16. zos_job_submit: src: HLQ.DATA.LLQ - location: data_set + remote_src: true max_rc: 16 - name: Submit JCL from the latest generation data set in a generation data group. zos_job_submit: src: HLQ.DATA.GDG(0) - location: data_set + remote_src: true - name: Submit JCL from a previous generation data set in a generation data group. 
zos_job_submit: src: HLQ.DATA.GDG(-2) - location: data_set + remote_src: true """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.encode import ( @@ -821,7 +815,7 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, start_ti job_fetched = jobs.fetch_multiple(job_submitted.job_id)[0] job_fetch_rc = job_fetched.return_code job_fetch_status = job_fetched.status - # Allow for jobs that need more time to be fectched to run the wait_time_s + # Allow for jobs that need more time to be fectched to run the wait_time except zoau_exceptions.JobFetchException as err: if duration >= timeout: raise err @@ -886,52 +880,6 @@ def submit_src_jcl(module, src, src_name=None, timeout=0, is_unix=True, start_ti return job_submitted.job_id if job_submitted else None, duration -def build_return_schema(result): - """ Builds return values schema with empty values. - - Parameters - ---------- - result : dict - Dictionary used to return values at execution finalization. - - Returns - ------- - dict - Dictionary used to return values at execution finalization. - """ - result = { - "jobs": [], - "job_id": None, - "job_name": None, - "duration": None, - "execution_time": None, - "ddnames": { - "ddname": None, - "record_count": None, - "id": None, - "stepname": None, - "procstep": None, - "byte_count": None, - "content": [], - }, - "ret_code": { - "code": None, - "msg": None, - "msg_code": None, - "msg_txt": None, - "steps": [], - }, - "job_class": None, - "svc_class": None, - "priority": None, - "asid": None, - "creation_date": None, - "queue_position": None, - "program_name": None, - } - return result - - def run_module(): """Initialize module. @@ -940,14 +888,14 @@ def run_module(): fail_json Parameter verification failed. fail_json - The value for option 'wait_time_s' is not valid. + The value for option 'wait_time' is not valid. 
""" module_args = dict( src=dict(type="str", required=True), - location=dict( - type="str", - default="data_set", - choices=["data_set", "uss", "local"], + remote_src=dict( + type="bool", + default=True, + required=False ), encoding=dict( type="dict", @@ -967,7 +915,7 @@ def run_module(): ), volume=dict(type="str", required=False), return_output=dict(type="bool", required=False, default=True), - wait_time_s=dict(type="int", default=10), + wait_time=dict(type="int", default=10), max_rc=dict(type="int", required=False), use_template=dict(type='bool', default=False), template_parameters=dict( @@ -1015,10 +963,10 @@ def run_module(): arg_defs = dict( src=dict(arg_type="data_set_or_path", required=True), - location=dict( - arg_type="str", - default="data_set", - choices=["data_set", "uss", "local"], + remote_src=dict( + arg_type="bool", + default=True, + required=False ), from_encoding=dict( arg_type="encoding", default=Defaults.DEFAULT_ASCII_CHARSET, required=False), @@ -1027,7 +975,7 @@ def run_module(): ), volume=dict(arg_type="volume", required=False), return_output=dict(arg_type="bool", default=True), - wait_time_s=dict(arg_type="int", required=False, default=10), + wait_time=dict(arg_type="int", required=False, default=10), max_rc=dict(arg_type="int", required=False), ) @@ -1043,22 +991,21 @@ def run_module(): msg="Parameter verification failed", stderr=str(err)) # Extract values from set module options - location = parsed_args.get("location") + remote_src = parsed_args.get("remote_src") volume = parsed_args.get("volume") src = parsed_args.get("src") return_output = parsed_args.get("return_output") - wait_time_s = parsed_args.get("wait_time_s") + wait_time = parsed_args.get("wait_time") max_rc = parsed_args.get("max_rc") - temp_file = parsed_args.get("src") if location == "local" else None + temp_file = parsed_args.get("src") if not remote_src else None # Default 'changed' is False in case the module is not able to execute result = dict(changed=False) # Builds 
return value schema to make sure we return the return values schema. - result = build_return_schema(result) - if wait_time_s < 0 or wait_time_s > MAX_WAIT_TIME_S: + if wait_time < 0 or wait_time > MAX_WAIT_TIME_S: result["failed"] = True - result["msg"] = ("The value for option 'wait_time_s' is not valid, it must " + result["msg"] = ("The value for option 'wait_time' is not valid, it must " "be greater than 0 and less than {0}.".format(str(MAX_WAIT_TIME_S))) module.fail_json(**result) @@ -1066,55 +1013,61 @@ def run_module(): duration = 0 start_time = timer() - if location == "data_set": - # Resolving a relative GDS name and escaping special symbols if needed. - src_data = data_set.MVSDataSet(src) - - # Checking that the source is actually present on the system. - if volume is not None: - volumes = [volume] - # Get the data set name to catalog it. - src_ds_name = data_set.extract_dsname(src_data.name) - present, changed = DataSet.attempt_catalog_if_necessary(src_ds_name, volumes) - - if not present: - module.fail_json( - msg=(f"Unable to submit job {src_data.name} because the data set could " - f"not be cataloged on the volume {volume}.") - ) - elif data_set.is_member(src_data.name): - if not DataSet.data_set_member_exists(src_data.name): - module.fail_json(msg=f"Cannot submit job, the data set member {src_data.raw_name} was not found.") + if remote_src: + if "/" in src: + if path.exists(src): + if path.isfile(src): + job_submitted_id, duration = submit_src_jcl( + module, src, src_name=src, timeout=wait_time, is_unix=True) + else: + module.fail_json(msg=f"Unable to submit job {src} is a folder, must be a file.", **result) + else: + module.fail_json(msg=f"Unable to submit job {src} does not exists.", **result) else: - if not DataSet.data_set_exists(src_data.name): - module.fail_json(msg=f"Cannot submit job, the data set {src_data.raw_name} was not found.") + # Resolving a relative GDS name and escaping special symbols if needed. 
+ src_data = data_set.MVSDataSet(src) + + # Checking that the source is actually present on the system. + if volume is not None: + volumes = [volume] + # Get the data set name to catalog it. + src_ds_name = data_set.extract_dsname(src_data.name) + present, changed = DataSet.attempt_catalog_if_necessary(src_ds_name, volumes) + + if not present: + module.fail_json( + msg=(f"Unable to submit job {src_data.name} because the data set could " + f"not be cataloged on the volume {volume}."), **result + ) + elif data_set.is_member(src_data.name): + if not DataSet.data_set_member_exists(src_data.name): + module.fail_json(msg=f"Cannot submit job, the data set member {src_data.raw_name} was not found.", **result) + else: + if not DataSet.data_set_exists(src_data.name): + module.fail_json(msg=f"Cannot submit job, the data set {src_data.raw_name} was not found.", **result) + job_submitted_id, duration = submit_src_jcl( + module, src_data.name, src_name=src_data.raw_name, timeout=wait_time, is_unix=False, start_time=start_time) + else: job_submitted_id, duration = submit_src_jcl( - module, src_data.name, src_name=src_data.raw_name, timeout=wait_time_s, is_unix=False, start_time=start_time) - elif location == "uss": - job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, is_unix=True) - elif location == "local": - job_submitted_id, duration = submit_src_jcl( - module, src, src_name=src, timeout=wait_time_s, is_unix=True) + module, src, src_name=src, timeout=wait_time, is_unix=True) # Explictly pass None for the unused args else a default of '*' will be # used and return undersirable results job_output_txt = None - result['job_id'] = job_submitted_id is_changed = True # If wait_time_s is 0, we do a deploy and forget strategy. 
- if wait_time_s != 0: + if wait_time != 0: try: job_output_txt = job_output( job_id=job_submitted_id, owner=None, job_name=None, dd_name=None, - dd_scan=return_output, duration=duration, timeout=wait_time_s, start_time=start_time) + dd_scan=return_output, duration=duration, timeout=wait_time, start_time=start_time) # This is resolvig a bug where the duration coming from job_output is passed by value, duration # being an immutable type can not be changed and must be returned or accessed from the job.py. - if job_output is not None: + if job_output_txt is not None: duration = job_output_txt[0].get("duration") if not None else duration - result["execution_time"] = job_output_txt[0].get("execution_time") + job_output_txt = parsing_job_response(job_output_txt, duration) result["duration"] = duration job_msg = job_output_txt[0].get("ret_code", {}).get("msg") @@ -1125,10 +1078,10 @@ def run_module(): _msg = ("The JCL submitted with job id {0} but appears to be a long " "running job that exceeded its maximum wait time of {1} " "second(s). 
Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_time_s' to a value " - "greater than {2}.".format(str(job_submitted_id), str(wait_time_s), str(duration))) + "a long running job or increase option 'wait_time' to a value " + "greater than {2}.".format(str(job_submitted_id), str(wait_time), str(duration))) _msg_suffix = ("Consider using module zos_job_query to poll for " - "a long running job or increase option 'wait_time_s' to a value " + "a long running job or increase option 'wait_time' to a value " "greater than {0}.".format(str(duration))) if job_output_txt is not None: @@ -1144,6 +1097,7 @@ def run_module(): if job_output_txt: result["jobs"] = job_output_txt job_ret_code = job_output_txt[0].get("ret_code") + steps = job_output_txt[0].get("steps") if job_ret_code: job_ret_code_msg = job_ret_code.get("msg") @@ -1151,7 +1105,7 @@ def run_module(): job_ret_code_msg_code = job_ret_code.get("msg_code") if return_output is True and max_rc is not None: - is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, result) + is_changed = assert_valid_return_code(max_rc, job_ret_code_code, job_ret_code, steps, result) if job_ret_code_msg is not None: if re.search("^(?:{0})".format("|".join(JOB_STATUSES)), job_ret_code_msg): @@ -1222,7 +1176,7 @@ def run_module(): if not return_output: for job in result.get("jobs", []): - job["ddnames"] = [] + job["dds"] = [] else: _msg = "The 'ret_code' dictionary was unavailable in the job log." result["ret_code"] = None @@ -1231,7 +1185,7 @@ def run_module(): else: _msg = "The job output log is unavailable." 
result["stderr"] = _msg - result["jobs"] = None + result["jobs"] = [] raise Exception(_msg) except Exception as err: result["failed"] = True @@ -1245,14 +1199,15 @@ def run_module(): finally: if temp_file is not None: shutil.rmtree(path.dirname(temp_file)) + else: + result["jobs"] = build_empty_response(job_submitted_id) # If max_rc is set, we don't want to default to changed=True, rely on 'is_changed' result["changed"] = True if is_changed else False - result["failed"] = False module.exit_json(**result) -def assert_valid_return_code(max_rc, job_rc, ret_code, result): +def assert_valid_return_code(max_rc, job_rc, ret_code, steps, result): """Asserts valid return code. Parameters @@ -1295,7 +1250,7 @@ def assert_valid_return_code(max_rc, job_rc, ret_code, result): result["stderr"] = _msg raise Exception(_msg) - for step in ret_code["steps"]: + for step in steps: step_cc_rc = int(step["step_cc"]) step_name_for_rc = step["step_name"] if step_cc_rc > max_rc: @@ -1319,6 +1274,76 @@ def assert_valid_return_code(max_rc, job_rc, ret_code, result): return True +def parsing_job_response(jobs_raw, duration): + """_summary_ + + Args: + jobs_raw (_type_): _description_ + """ + job = jobs_raw[0] + jobs = [] + for job in jobs_raw: + job_dict = { + "job_id": job.get("job_id"), + "job_name": job.get("job_name"), + "content_type": job.get("content_type"), + "duration": duration, + "execution_time": job.get("execution_time"), + "dds": job.get("dds"), + "ret_code": job.get("ret_code"), + "steps": job.get("steps"), + "job_class": job.get("job_class"), + "svc_class": job.get("svc_class"), + "system": job.get("system"), + "subsystem": job.get("subsystem"), + "origin_node": job.get("origin_node"), + "cpu_time": job.get("cpu_time"), + "execution_node": job.get("execution_node"), + "priority": job.get("priority"), + "asid": job.get("asid"), + "creation_date": job.get("creation_date"), + "creation_time": job.get("creation_time"), + "queue_position": job.get("queue_position"), + 
"program_name": job.get("program_name"), + } + jobs.append(job_dict) + return jobs + + +def build_empty_response(job_submitted_id): + """_summary_ + + Args: + jobs_raw (_type_): _description_ + """ + jobs = [] + job_dict = { + "job_id": job_submitted_id, + "job_name": None, + "content_type": None, + "duration": None, + "execution_time": None, + "dds": [], + "ret_code": {"code": None, "msg": None, "msg_code": None, "msg_txt": None}, + "steps": [], + "job_class": None, + "svc_class": None, + "system": None, + "subsystem": None, + "origin_node": None, + "cpu_time": None, + "execution_node": None, + "priority": None, + "asid": None, + "creation_date": None, + "creation_time": None, + "queue_position": None, + "program_name": None, + } + jobs.append(job_dict) + return jobs + + def main(): run_module() diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index f5565ce9ee..c6539eb4f8 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -104,6 +104,7 @@ - Default is EOF required: false type: str + aliases: [ after ] insertbefore: description: - Used with C(state=present). @@ -121,6 +122,7 @@ - Choices are BOF or '*regex*' required: false type: str + aliases: [ before ] backup: description: - Creates a backup file or backup data set for I(src), including the @@ -291,11 +293,23 @@ returned: failure type: str sample: Parameter verification failed -return_content: +stdout: + description: The stdout from ZOAU dsed command. + returned: always + type: str +stderr: description: The error messages from ZOAU dsed - returned: failure + returned: always type: str sample: BGYSC1311E Iconv error, cannot open converter from ISO-88955-1 to IBM-1047 +stdout_lines: + description: List of strings containing individual lines from stdout. + returned: always + type: list +stderr_lines: + description: List of strings containing individual lines from stderr. 
+ returned: always + type: list backup_name: description: Name of the backup file or data set that was created. returned: if backup=true @@ -604,9 +618,11 @@ def main(): line=dict(type='str'), insertafter=dict( type='str', + aliases=['after'] ), insertbefore=dict( type='str', + aliases=['before'] ), backrefs=dict(type='bool', default=False), backup=dict(type='bool', default=False), @@ -627,8 +643,8 @@ def main(): state=dict(arg_type="str", default='present', choices=['absent', 'present']), regexp=dict(arg_type="str", required=False), line=dict(arg_type="str", required=False), - insertafter=dict(arg_type="str", required=False), - insertbefore=dict(arg_type="str", required=False), + insertafter=dict(arg_type="str", required=False, aliases=['after']), + insertbefore=dict(arg_type="str", required=False, aliases=['before']), encoding=dict(arg_type="str", default="IBM-1047", required=False), backup=dict(arg_type="bool", default=False, required=False), backup_name=dict(arg_type="data_set_or_path", required=False, default=None), @@ -673,7 +689,24 @@ def main(): is_gds = False has_special_chars = False dmod_exec = False - return_content = "" + rc = 0 + stdout = '' + stderr = '' + cmd = '' + changed = False + return_content = None + + result = dict( + changed=False, + cmd='', + found=0, + stdout='', + stdout_lines=[], + stderr='', + stderr_lines=[], + rc=0, + backup_name='', + ) # analysis the file type if "/" not in src: @@ -717,27 +750,19 @@ def main(): # state=absent, delete lines with matching regex pattern if parsed_args.get('state') == 'present': if dmod_exec: - rc, cmd, stodut = execute_dsed(src, state=True, encoding=encoding, module=module, line=line, first_match=firstmatch, + rc, cmd, stdout = execute_dsed(src, state=True, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, backrefs=backrefs, regex=regexp, ins_bef=ins_bef, ins_aft=ins_aft) - result['rc'] = rc - result['cmd'] = cmd - result['stodut'] = stodut - result['return_content'] = 
stodut - result['changed'] = True if rc == 0 else False stderr = 'Failed to insert new entry' if rc != 0 else "" + changed = True if rc == 0 else False else: return_content = present(src, quotedString(line), quotedString(regexp), quotedString(ins_aft), quotedString(ins_bef), encoding, firstmatch, backrefs, force) else: if dmod_exec: - rc, cmd, stodut = execute_dsed(src, state=False, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, + rc, cmd, stdout = execute_dsed(src, state=False, encoding=encoding, module=module, line=line, first_match=firstmatch, force=force, backrefs=backrefs, regex=regexp, ins_bef=ins_bef, ins_aft=ins_aft) - result['rc'] = rc - result['cmd'] = cmd - result['stodut'] = stodut - result['return_content'] = stodut - result['changed'] = True if rc == 0 else False stderr = 'Failed to insert new entry' if rc != 0 else "" + changed = True if rc == 0 else False else: return_content = absent(src, quotedString(line), quotedString(regexp), encoding, force) if not dmod_exec: @@ -790,17 +815,16 @@ def main(): if 'cmd' in ret: ret['cmd'] = ret['cmd'].replace('\\"', '"').replace('\\\\', '\\') result['cmd'] = ret['cmd'] - result['changed'] = ret.get('changed', False) + changed = ret.get('changed', False) result['found'] = ret.get('found', 0) result['stdout'] = stdout - result['return_content'] = stdout - # Only return 'rc' if stderr is not empty to not fail the playbook run in a nomatch case - # That information will be given with 'changed' and 'found' - if len(stderr): - result['stderr'] = str(stderr) - result['rc'] = rc - if 'backup_name' not in result: - result['backup_name'] = "" + result['changed'] = changed + result['rc'] = rc + result['cmd'] = cmd + result['stdout'] = str(stdout) + result['stderr'] = str(stderr) + result['stdout_lines'] = result['stdout'].splitlines() + result['stderr_lines'] = result['stderr'].splitlines() module.exit_json(**result) diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py 
index a35dda095a..825c76a8fe 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -62,38 +62,38 @@ - > If I(state=mounted) and I(src) are not in use, the module will add the file system entry to the parmlib member - I(persistent/data_store) if not present. The I(path) will be + I(persistent/name) if not present. The I(path) will be updated, the device will be mounted and the module will complete successfully with I(changed=True). - > If I(state=mounted) and I(src) are in use, the module will add the file system entry to the parmlib member - I(persistent/data_store) if not present. The I(path) will not + I(persistent/name) if not present. The I(path) will not be updated, the device will not be mounted and the module will complete successfully with I(changed=False). - > If I(state=unmounted) and I(src) are in use, the module will B(not) add the file system entry to the parmlib member - I(persistent/data_store). The device will be unmounted and + I(persistent/name). The device will be unmounted and the module will complete successfully with I(changed=True). - > If I(state=unmounted) and I(src) are not in use, the module will B(not) add the file system entry to parmlib member - I(persistent/data_store).The device will remain unchanged and + I(persistent/name).The device will remain unchanged and the module will complete with I(changed=False). - > If I(state=present), the module will add the file system entry - to the provided parmlib member I(persistent/data_store) + to the provided parmlib member I(persistent/name) if not present. The module will complete successfully with I(changed=True). - > If I(state=absent), the module will remove the file system entry - to the provided parmlib member I(persistent/data_store) if + to the provided parmlib member I(persistent/name) if present. The module will complete successfully with I(changed=True). 
- > If I(state=remounted), the module will B(not) add the file - system entry to parmlib member I(persistent/data_store). The + system entry to parmlib member I(persistent/name). The device will be unmounted and mounted, the module will complete successfully with I(changed=True). type: str @@ -107,22 +107,23 @@ default: mounted persistent: description: - - Add or remove mount command entries to provided I(data_store) + - Add or remove mount command entries to provided I(name) required: False type: dict suboptions: - data_store: + name: description: - The data set name used for persisting a mount command. This is usually BPXPRMxx or a copy. required: True type: str + aliases: [ data_store ] backup: description: - Creates a backup file or backup data set for - I(data_store), including the timestamp information to + I(name), including the timestamp information to ensure that you retrieve the original parameters defined - in I(data_store). + in I(name). - I(backup_name) can be used to specify a backup file name if I(backup=true). - The backup file name will be returned on either success or @@ -135,7 +136,7 @@ description: - Specify the USS file name or data set name for the destination backup. - - If the source I(data_store) is a USS file or path, the + - If the source I(name) is a USS file or path, the I(backup_name) name can be relative or absolute for file or path name. - If the source is an MVS data set, the backup_name must be @@ -151,15 +152,16 @@ MVS backup data set recovery can be done by renaming it. required: false type: str - comment: + marker: description: - - If provided, this is used as a comment that surrounds the - command in the I(persistent/data_store) - - Comments are used to encapsulate the I(persistent/data_store) entry + - If provided, this is used as a marker that surrounds the + command in the I(persistent/name) + - Comments are used to encapsulate the I(persistent/name) entry such that they can easily be understood and located. 
type: list elements: str required: False + aliases: [ comment ] unmount_opts: description: - Describes how the unmount will be performed. @@ -356,8 +358,8 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) - comment: For Tape2 project + name: SYS1.PARMLIB(BPXPRMAA) + marker: For Tape2 project - name: Mount a filesystem and record change in BPXPRMAA after backing up to BPXPRMAB. zos_mount: @@ -366,10 +368,10 @@ fs_type: zfs state: mounted persistent: - data_store: SYS1.PARMLIB(BPXPRMAA) + name: SYS1.PARMLIB(BPXPRMAA) backup: true backup_name: SYS1.PARMLIB(BPXPRMAB) - comment: For Tape2 project + marker: For Tape2 project - name: Mount a filesystem ignoring uid/gid values. zos_mount: @@ -440,7 +442,7 @@ returned: always type: dict contains: - data_store: + name: description: The persistent store name where the mount was written to. returned: always type: str @@ -455,12 +457,19 @@ returned: always type: str sample: SYS1.FILESYS(PRMAABAK) - comment: - description: The text that was used in markers around the I(Persistent/data_store) entry. + marker: + description: The text that was used in markers around the I(Persistent/name) entry. returned: always type: list sample: - [u'I did this because..'] + state: + description: + - The state of the persistent entry in the persistent data set. + - Possible values are C(added) and C(removed). + returned: always + type: str + sample: added unmount_opts: description: Describes how the unmount is to be performed. 
returned: changed and if state=unmounted @@ -740,7 +749,7 @@ def run_module(module, arg_def): persistent = parsed_args.get("persistent") backup = None backup_name = "" - comment = None + marker = None unmount_opts = parsed_args.get("unmount_opts") mount_opts = parsed_args.get("mount_opts") src_params = parsed_args.get("src_params") @@ -753,8 +762,8 @@ def run_module(module, arg_def): tmphlq = parsed_args.get("tmp_hlq") if persistent: - data_store = persistent.get("data_store").upper() - comment = persistent.get("comment") + name = persistent.get("name").upper() + marker = persistent.get("marker") backup = persistent.get("backup") if backup: if persistent.get("backup_name"): @@ -763,20 +772,19 @@ def run_module(module, arg_def): backup_code = None else: backup_code = backup_name - backup_name = mt_backupOper(module, data_store, backup_code, tmphlq) + backup_name = mt_backupOper(module, name, backup_code, tmphlq) res_args["backup_name"] = backup_name del persistent["backup"] - if "mounted" in state or "present" in state: - persistent["addDataset"] = data_store + if state == "mounted" or state == "present": + persistent["state"] = "added" else: - persistent["delDataset"] = data_store - del persistent["data_store"] + persistent["state"] = "removed" write_persistent = False if "mounted" in state or "present" in state or "absent" in state: if persistent: - if data_store: - if len(data_store) > 0: + if name: + if len(name) > 0: write_persistent = True will_mount = True @@ -793,7 +801,7 @@ def run_module(module, arg_def): path=path, fs_type=fs_type, state=state, - persistent=parsed_args.get("persistent"), + persistent=persistent, unmount_opts=unmount_opts, mount_opts=mount_opts, src_params=src_params, @@ -805,13 +813,12 @@ def run_module(module, arg_def): automove_list=automove_list, cmd="not built", changed=changed, - comment=comment, + marker=marker, rc=0, stdout="", stderr="", ) ) - # data set to be mounted/unmounted must exist fs_du = data_set.DataSetUtils(src, 
tmphlq=tmphlq) fs_exists = fs_du.exists() @@ -869,10 +876,10 @@ def run_module(module, arg_def): parmtext = "" - if comment is not None: + if marker is not None: extra = "" ctr = 1 - for tabline in comment: + for tabline in marker: if len(extra) > 0: extra += " " extra += tabline.strip() @@ -1033,18 +1040,18 @@ def run_module(module, arg_def): stderr = "Mount called on data set that is already mounted.\n" if write_persistent and module.check_mode is False: - fst_du = data_set.DataSetUtils(data_store, tmphlq=tmphlq) + fst_du = data_set.DataSetUtils(name, tmphlq=tmphlq) fst_exists = fst_du.exists() if fst_exists is False: module.fail_json( - msg="Persistent data set ({0}) is either not cataloged or does not exist.".format(data_store), + msg="Persistent data set ({0}) is either not cataloged or does not exist.".format(name), stderr=str(res_args), ) bk_ds = datasets.tmp_name(high_level_qualifier=tmphlq) datasets.create(name=bk_ds, dataset_type="SEQ") - new_str = get_str_to_keep(dataset=data_store, src=src) + new_str = get_str_to_keep(dataset=name, src=src) rc_write = 0 @@ -1056,13 +1063,13 @@ def run_module(module, arg_def): except Exception as e: datasets.delete(dataset=bk_ds) module.fail_json( - msg="Unable to write on persistent data set {0}. {1}".format(data_store, e), + msg="Unable to write on persistent data set {0}. 
{1}".format(name, e), stderr=str(res_args), ) try: - datasets.delete(dataset=data_store) - datasets.copy(source=bk_ds, target=data_store) + datasets.delete(dataset=name) + datasets.copy(source=bk_ds, target=name) finally: datasets.delete(dataset=bk_ds) @@ -1072,7 +1079,7 @@ def run_module(module, arg_def): marker = '/* {mark} ANSIBLE MANAGED BLOCK ' + dtstr + " */" marker = "{0}\\n{1}\\n{2}".format("BEGIN", "END", marker) - datasets.blockinfile(dataset=data_store, state=True, block=parmtext, marker=marker, insert_after="EOF") + datasets.blockinfile(dataset=name, state=True, block=parmtext, marker=marker, insert_after="EOF") if rc == 0: if stdout is None: @@ -1091,7 +1098,7 @@ def run_module(module, arg_def): stderr=stderr, ) ) - del res_args["comment"] + del res_args["marker"] return res_args @@ -1130,13 +1137,33 @@ def main(): type="dict", required=False, options=dict( - data_store=dict( + name=dict( type="str", required=True, + aliases=["data_store"], + deprecated_aliases=[ + dict( + name='data_store', + version='3.0.0', # Version when it will be removed + collection_name='ibm.ibm_zos_core', + ) + ], ), backup=dict(type="bool", default=False), backup_name=dict(type="str", required=False, default=None), - comment=dict(type="list", elements="str", required=False), + marker=dict( + type="list", + elements="str", + required=False, + aliases=["comment"], + deprecated_aliases=[ + dict( + name='comment', + version='3.0.0', # Version when it will be removed + collection_name='ibm.ibm_zos_core', + ) + ], + ), ), ), unmount_opts=dict( @@ -1193,10 +1220,10 @@ def main(): arg_type="dict", required=False, options=dict( - data_store=dict(arg_type="str", required=True), + name=dict(arg_type="str", required=True, aliases=["data_store"]), backup=dict(arg_type="bool", default=False), backup_name=dict(arg_type="str", required=False, default=None), - comment=dict(arg_type="list", elements="str", required=False), + marker=dict(arg_type="list", elements="str", required=False, 
aliases=["comment"]), ), ), unmount_opts=dict( diff --git a/plugins/modules/zos_operator.py b/plugins/modules/zos_operator.py index ab529cf335..eb8bc34fad 100644 --- a/plugins/modules/zos_operator.py +++ b/plugins/modules/zos_operator.py @@ -50,17 +50,26 @@ type: bool required: false default: false - wait_time_s: + wait_time: description: - Set maximum time in seconds to wait for the commands to execute. - When set to 0, the system default is used. - This option is helpful on a busy system requiring more time to execute commands. - Setting I(wait) can instruct if execution should wait the - full I(wait_time_s). + full I(wait_time). type: int required: false default: 1 + time_unit: + description: + - Set the C(wait_time) unit of time, which can be C(s) (seconds) or C(cs) (centiseconds). + type: str + required: false + default: s + choices: + - s + - cs case_sensitive: description: - If C(true), the command will not be converted to uppercase before @@ -103,11 +112,17 @@ - name: Execute operator command to show jobs, always waiting 5 seconds for response zos_operator: cmd: 'd a,all' - wait_time_s: 5 + wait_time: 5 - name: Display the system symbols and associated substitution texts. zos_operator: cmd: 'D SYMBOLS' + +- name: Execute an operator command to show device status and allocation wait 10 centiseconds. + zos_operator: + cmd: 'd u' + wait_time : 10 + time_unit : 'cs' """ RETURN = r""" @@ -125,16 +140,22 @@ sample: d u,all elapsed: description: - The number of seconds that elapsed waiting for the command to complete. + The number of seconds or centiseconds that elapsed waiting for the command to complete. returned: always type: float sample: 51.53 -wait_time_s: +wait_time: description: - The maximum time in seconds to wait for the commands to execute. + The maximum time in the time_unit set to wait for the commands to execute. returned: always type: int sample: 5 +time_unit: + description: + The time unit set for wait_time. 
+ returned: always + type: str + sample: s content: description: The resulting text from the command submitted. @@ -200,7 +221,7 @@ opercmd = ZOAUImportError(traceback.format_exc()) -def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): +def execute_command(operator_cmd, time_unit, timeout=1, preserve=False, *args, **kwargs): """ Executes an operator command. @@ -208,6 +229,8 @@ def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): ---------- operator_cmd : str Command to execute. + time_unit : str + Unit of time to wait of execution of the command. timeout : int Time until it stops whether it finished or not. preserve : bool @@ -223,15 +246,20 @@ def execute_command(operator_cmd, timeout_s=1, preserve=False, *args, **kwargs): Return code, standard output, standard error and time elapsed from start to finish. """ # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: - timeout_c = 100 * timeout_s + if time_unit == "s": + timeout = 100 * timeout start = timer() - response = opercmd.execute(operator_cmd, timeout=timeout_c, preserve=preserve, *args, **kwargs) + response = opercmd.execute(operator_cmd, timeout=timeout, preserve=preserve, *args, **kwargs) end = timer() rc = response.rc stdout = response.stdout_response stderr = response.stderr_response - elapsed = round(end - start, 2) + if time_unit == "cs": + elapsed = round((end - start) * 100, 2) + else: + elapsed = round(end - start, 2) + return rc, stdout, stderr, elapsed @@ -252,7 +280,8 @@ def run_module(): module_args = dict( cmd=dict(type="str", required=True), verbose=dict(type="bool", required=False, default=False), - wait_time_s=dict(type="int", required=False, default=1), + wait_time=dict(type="int", required=False, default=1), + time_unit=dict(type="str", required=False, choices=["s", "cs"], default="s"), case_sensitive=dict(type="bool", required=False, default=False), ) @@ -294,7 +323,8 @@ def run_module(): # call is returned from 
run_operator_command, specifying what was run. # result["cmd"] = new_params.get("cmd") result["cmd"] = rc_message.get("call") - result["wait_time_s"] = new_params.get("wait_time_s") + result["wait_time"] = new_params.get("wait_time") + result["time_unit"] = new_params.get("time_unit") result["changed"] = False # rc=0, something succeeded (the calling script ran), @@ -309,7 +339,8 @@ def run_module(): module.fail_json(msg=("A non-zero return code was received : {0}. Review the response for more details.").format(result["rc"]), cmd=result["cmd"], elapsed_time=result["elapsed"], - wait_time_s=result["wait_time_s"], + wait_time=result["wait_time"], + time_unit=result["time_unit"], stderr=str(error) if error is not None else result["content"], stderr_lines=str(error).splitlines() if error is not None else result["content"], changed=result["changed"],) @@ -338,9 +369,10 @@ def parse_params(params): """ arg_defs = dict( cmd=dict(arg_type="str", required=True), - verbose=dict(arg_type="bool", required=False), - wait_time_s=dict(arg_type="int", required=False), - case_sensitive=dict(arg_type="bool", required=False), + verbose=dict(arg_type="bool", required=False, default=False), + wait_time=dict(arg_type="int", required=False, default=1), + time_unit=dict(type="str", required=False, choices=["s", "cs"], default="s"), + case_sensitive=dict(arg_type="bool", required=False, default=False), ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -369,7 +401,8 @@ def run_operator_command(params): kwargs.update({"verbose": True}) kwargs.update({"debug": True}) - wait_s = params.get("wait_time_s") + wait_time = params.get("wait_time") + time_unit = params.get("time_unit") cmdtxt = params.get("cmd") preserve = params.get("case_sensitive") @@ -381,7 +414,7 @@ def run_operator_command(params): kwargs.update({"wait": True}) args = [] - rc, stdout, stderr, elapsed = execute_command(cmdtxt, timeout_s=wait_s, preserve=preserve, *args, **kwargs) + rc, stdout, 
stderr, elapsed = execute_command(cmdtxt, time_unit=time_unit, timeout=wait_time, preserve=preserve, *args, **kwargs) if rc > 0: message = "\nOut: {0}\nErr: {1}\nRan: {2}".format(stdout, stderr, cmdtxt) diff --git a/plugins/modules/zos_operator_action_query.py b/plugins/modules/zos_operator_action_query.py index 525518a2f2..2b38e75aa1 100644 --- a/plugins/modules/zos_operator_action_query.py +++ b/plugins/modules/zos_operator_action_query.py @@ -30,6 +30,7 @@ - "Demetrios Dimatos (@ddimatos)" - "Ivan Moreno (@rexemin)" - "Rich Parker (@richp405)" + - "Fernando Flores (@fernandofloresg)" options: system: @@ -41,7 +42,7 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false - message_id: + msg_id: description: - Return outstanding messages requiring operator action awaiting a reply for a particular message identifier. @@ -50,6 +51,7 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false + aliases: [ message_id ] job_name: description: - Return outstanding messages requiring operator action awaiting a reply @@ -59,7 +61,7 @@ - A trailing asterisk, (*) wildcard is supported. type: str required: false - message_filter: + msg_filter: description: - Return outstanding messages requiring operator action awaiting a reply that match a regular expression (regex) filter. @@ -67,11 +69,12 @@ are returned regardless of their content. type: dict required: false + aliases: [ message_filter ] suboptions: filter: description: - Specifies the substring or regex to match to the outstanding messages, - see I(use_regex). + see I(literal). - All special characters in a filter string that are not a regex are escaped. - Valid Python regular expressions are supported. See L(the official documentation,https://docs.python.org/library/re.html) for more information. @@ -80,16 +83,16 @@ newline." required: True type: str - use_regex: + literal: description: - Indicates that the value for I(filter) is a regex or a string to match. 
- - If False, the module assumes that I(filter) is not a regex and - matches the I(filter) substring on the outstanding messages. - - If True, the module creates a regex from the I(filter) string and + - If False, the module creates a regex from the I(filter) string and matches it to the outstanding messages. + - If True, the module assumes that I(filter) is not a regex and + matches the I(filter) substring on the outstanding messages. required: False type: bool - default: False + default: True seealso: - module: zos_operator @@ -116,11 +119,11 @@ - name: Display all outstanding messages whose message id begin with dsi* zos_operator_action_query: - message_id: dsi* + msg_id: dsi* - name: Display all outstanding messages that have the text IMS READY in them zos_operator_action_query: - message_filter: + msg_filter: filter: IMS READY - name: Display all outstanding messages where the job name begins with 'mq', @@ -128,11 +131,11 @@ pattern 'IMS' zos_operator_action_query: job_name: mq* - message_id: dsi* + msg_id: dsi* system: mv29 - message_filter: + msg_filter: filter: ^.*IMS.*$ - use_regex: true + literal: true """ RETURN = r""" @@ -147,13 +150,13 @@ count: description: The total number of outstanding messages. - returned: on success + returned: always type: int sample: 12 actions: description: The list of the outstanding messages. - returned: success + returned: always type: list elements: dict contains: @@ -183,11 +186,11 @@ returned: on success type: str sample: STC01537 - message_text: + msg_txt: description: Content of the outstanding message requiring operator - action awaiting a reply. If I(message_filter) is set, - I(message_text) will be filtered accordingly. + action awaiting a reply. If I(msg_filter) is set, + I(msg_txt) will be filtered accordingly. 
returned: success type: str sample: "*399 HWSC0000I *IMS CONNECT READY* IM5HCONN" @@ -198,7 +201,7 @@ returned: success type: str sample: IM5HCONN - message_id: + msg_id: description: Message identifier for outstanding message requiring operator action awaiting a reply. @@ -212,18 +215,18 @@ "type": 'R', "system": 'MV27', "job_id": 'STC01537', - "message_text": '*399 HWSC0000I *IMS CONNECT READY* IM5HCONN', + "msg_txt": '*399 HWSC0000I *IMS CONNECT READY* IM5HCONN', "job_name": 'IM5HCONN', - "message_id": 'HWSC0000I' + "msg_id": 'HWSC0000I' }, { "number": '002', "type": 'R', "system": 'MV27', "job_id": 'STC01533', - "message_text": '*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H', + "msg_txt": '*400 DFS3139I IMS INITIALIZED, AUTOMATIC RESTART PROCEEDING IM5H', "job_name": 'IM5HCTRL', - "message_id": 'DFS3139I' + "msg_id": 'DFS3139I' } ] """ @@ -260,19 +263,20 @@ def run_module(): """ module_args = dict( system=dict(type="str", required=False), - message_id=dict(type="str", required=False), + msg_id=dict(type="str", required=False, aliases=['message_id']), job_name=dict(type="str", required=False), - message_filter=dict( + msg_filter=dict( type="dict", required=False, + aliases=['message_filter'], options=dict( filter=dict(type="str", required=True), - use_regex=dict(default=False, type="bool", required=False) + literal=dict(default=True, type="bool", required=False) ) ) ) - result = dict(changed=False) + result = dict(changed=False, count=0, actions=[]) module = AnsibleModule(argument_spec=module_args, supports_check_mode=False) requests = [] try: @@ -321,7 +325,7 @@ def run_module(): cmd="d r,a,jn", ) - merged_list = create_merge_list(cmd_result_a.message, cmd_result_b.message, new_params['message_filter']) + merged_list = create_merge_list(cmd_result_a.message, cmd_result_b.message, new_params['msg_filter']) requests = find_required_request(merged_list, new_params) if requests: result["count"] = len(requests) @@ -351,9 +355,9 @@ def 
parse_params(params): """ arg_defs = dict( system=dict(arg_type=system_type, required=False), - message_id=dict(arg_type=message_id_type, required=False), + msg_id=dict(arg_type=msg_id_type, required=False), job_name=dict(arg_type=job_name_type, required=False), - message_filter=dict(arg_type=message_filter_type, required=False) + msg_filter=dict(arg_type=msg_filter_type, required=False) ) parser = BetterArgParser(arg_defs) new_params = parser.parse_args(params) @@ -380,7 +384,7 @@ def system_type(arg_val, params): return arg_val.upper() -def message_id_type(arg_val, params): +def msg_id_type(arg_val, params): """Message id type. Parameters @@ -420,7 +424,7 @@ def job_name_type(arg_val, params): return arg_val.upper() -def message_filter_type(arg_val, params): +def msg_filter_type(arg_val, params): """Message filter type. Parameters @@ -442,12 +446,12 @@ def message_filter_type(arg_val, params): """ try: filter_text = arg_val.get("filter") - use_regex = arg_val.get("use_regex") + literal = arg_val.get("literal") - if use_regex: - raw_arg_val = r'{0}'.format(filter_text) - else: + if literal: raw_arg_val = r'^.*{0}.*$'.format(re.escape(filter_text)) + else: + raw_arg_val = r'{0}'.format(filter_text) re.compile(raw_arg_val) except re.error: @@ -503,7 +507,7 @@ def find_required_request(merged_list, params): return requests -def create_merge_list(message_a, message_b, message_filter): +def create_merge_list(msg_a, msg_b, msg_filter): """Merge the return lists that execute both 'd r,a,s' and 'd r,a,jn'. For example, if we have: 'd r,a,s' response like: "742 R MV28 JOB57578 &742 ARC0055A REPLY 'GO' OR 'CANCEL'" @@ -512,20 +516,20 @@ def create_merge_list(message_a, message_b, message_filter): Parameters ---------- - message_a : str + msg_a : str Result coming from command 'd r,a,s'. - message_b : str + msg_b : str Result coming from command 'd r,a,jn'. - message_filter : str + msg_filter : str Message filter. 
Returns ------- Union - Merge of the result of message_a and the result of message_b. + Merge of the result of msg_a and the result of msg_b. """ - list_a = parse_result_a(message_a, message_filter) - list_b = parse_result_b(message_b, message_filter) + list_a = parse_result_a(msg_a, msg_filter) + list_b = parse_result_b(msg_b, msg_filter) merged_list = merge_list(list_a, list_b) return merged_list @@ -546,15 +550,15 @@ def filter_requests(merged_list, params): Filtered list. """ system = params.get("system") - message_id = params.get("message_id") + msg_id = params.get("msg_id") job_name = params.get("job_name") newlist = merged_list if system: newlist = handle_conditions(newlist, "system", system) if job_name: newlist = handle_conditions(newlist, "job_name", job_name) - if message_id: - newlist = handle_conditions(newlist, "message_id", message_id) + if msg_id: + newlist = handle_conditions(newlist, "msg_id", msg_id) return newlist @@ -619,14 +623,14 @@ def execute_command(operator_cmd, timeout_s=1, *args, **kwargs): return OperatorQueryResult(rc, stdout, stderr) -def match_raw_message(msg, message_filter): +def match_raw_message(msg, msg_filter): """Match raw message. Parameters ---------- msg : str Message to match. - message_filter : str + msg_filter : str Filter for the message. Return @@ -634,11 +638,11 @@ def match_raw_message(msg, message_filter): bool If the pattern matches msg. """ - pattern = re.compile(message_filter, re.DOTALL) + pattern = re.compile(msg_filter, re.DOTALL) return pattern.match(msg) -def parse_result_a(result, message_filter): +def parse_result_a(result, msg_filter): """parsing the result that coming from command 'd r,a,s', there are usually two formats: - line with job_id: 810 R MV2D JOB58389 &810 ARC0055A REPLY 'GO' OR 'CANCEL' @@ -649,7 +653,7 @@ def parse_result_a(result, message_filter): ---------- result : str Result coming from command 'd r,a,s'. - message_filter : str + msg_filter : str Message filter. 
Returns @@ -668,7 +672,7 @@ def parse_result_a(result, message_filter): ) for match in match_iter: # If there was a filter specified, we skip messages that do not match it. - if message_filter is not None and not match_raw_message(match.string, message_filter): + if msg_filter is not None and not match_raw_message(match.string, msg_filter): continue dict_temp = { @@ -679,13 +683,13 @@ def parse_result_a(result, message_filter): if match.group(4) != "": dict_temp["job_id"] = match.group(4) if match.group(5) != "": - dict_temp["message_text"] = match.group(5).strip() + dict_temp["msg_txt"] = match.group(5).strip() list.append(dict_temp) return list -def parse_result_b(result, message_filter): +def parse_result_b(result, msg_filter): """Parse the result that comes from command 'd r,a,jn', the main purpose to use this command is to get the job_name and message id, which is not included in 'd r,a,s' @@ -694,7 +698,7 @@ def parse_result_b(result, message_filter): ---------- result : str Result coming from command 'd r,a,jn'. - message_filter : str + msg_filter : str Message filter. Returns @@ -714,13 +718,13 @@ def parse_result_b(result, message_filter): for match in match_iter: # If there was a filter specified, we skip messages that do not match it. 
- if message_filter is not None and not match_raw_message(match.string, message_filter): + if msg_filter is not None and not match_raw_message(match.string, msg_filter): continue dict_temp = { "number": match.group(1), "job_name": match.group(2), - "message_id": match.group(3), + "msg_id": match.group(3), } # Sometimes 'job_name' will be null because the operator action is a diff --git a/plugins/modules/zos_started_task.py b/plugins/modules/zos_started_task.py new file mode 100644 index 0000000000..295cdefa94 --- /dev/null +++ b/plugins/modules/zos_started_task.py @@ -0,0 +1,1518 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2025 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type + +DOCUMENTATION = r""" +module: zos_started_task +version_added: 2.0.0 +author: + - "Ravella Surendra Babu (@surendrababuravella)" +short_description: Perform operations on started tasks. +description: + - start, display, modify, cancel, force and stop a started task + +options: + arm: + description: + - I(arm) indicates to execute normal task termination routines without causing address space destruction. + - Only applicable when I(state) is C(forced), otherwise ignored. 
+ required: false + type: bool + armrestart: + description: + - Indicates that the batch job or started task should be automatically restarted after CANCEL or FORCE + completes, if it is registered as an element of the automatic restart manager. If the job or task is + not registered or if you do not specify this parameter, MVS will not automatically restart the job or task. + - Only applicable when I(state) is C(cancelled) or C(forced), otherwise ignored. + required: false + type: bool + asidx: + description: + - When I(state) is C(cancelled), C(stopped) or C(forced), I(asidx) is the hexadecimal address space + identifier of the work unit you want to cancel, stop or force. + - Only applicable when I(state) is C(stopped), C(cancelled), or C(forced), otherwise ignored. + required: false + type: str +# device_type: +# description: +# - Type of the output device (if any) associated with the task. +# - Only applicable when I(state) is C(started), otherwise ignored. +# required: false +# type: str +# device_number: +# description: +# - Number of the device to be started. A device number is 3 or 4 hexadecimal digits. A slash (/) must +# precede a 4-digit number but not a 3-digit number. +# - Only applicable when I(state) is C(started), otherwise ignored. +# required: false +# type: str + dump: + description: + - Whether to perform a dump. The type of dump (SYSABEND, SYSUDUMP, or SYSMDUMP) + depends on the JCL for the job. + - Only applicable when I(state) is C(cancelled), otherwise ignored. + required: false + type: bool + identifier_name: + description: + - Option I(identifier_name) is the name that identifies the task. This name can be up to 8 + characters long. The first character must be alphabetical. + required: false + type: str + aliases: + - identifier + job_account: + description: + - Specifies accounting data in the JCL JOB statement for the started task. 
If the source JCL + already had accounting data, the value that is specified on this parameter overrides it. + - Only applicable when I(state) is C(started), otherwise ignored. + required: false + type: str + job_name: + description: + - When I(state) is started, this is the name which should be assigned to a started task + while starting it. If I(job_name) is not specified, then I(member_name) is used as job's name. + - When I(state) is C(displayed), C(modified), C(cancelled), C(stopped), or C(forced), I(job_name) is the + started task name. + required: false + type: str + aliases: + - job + - task + - task_name + keyword_parameters: + description: + - Any appropriate keyword parameter that you specify to override the corresponding parameter in the cataloged + procedure. The maximum length of each keyword=option pair is 66 characters. No individual value within this + field can be longer than 44 characters in length. + - Only applicable when I(state) is C(started), otherwise ignored. + required: false + type: dict + member_name: + description: + - Name of a member of a partitioned data set that contains the source JCL for the task to be started. The member + can be either a job or a cataloged procedure. + - Only applicable when I(state) is C(started), otherwise ignored. + required: false + type: str + aliases: + - member + parameters: + description: + - Program parameters passed to the started program. + - Only applicable when I(state) is C(started) or C(modified), otherwise ignored. + required: false + type: list + elements: str +# retry_force: +# description: +# - Indicates whether retry will be attempted on ABTERM(abnormal termination). +# - I(tcb_address) is mandatory to use I(retry_force). +# - Only applicable when I(state) is C(forced), otherwise ignored. 
+# required: false +# type: bool + reus_asid: + description: + - When I(reus_asid) is C(True) and REUSASID(YES) is specified in the DIAGxx parmlib member, a reusable ASID is assigned + to the address space created by the START command. If I(reus_asid) is not specified or REUSASID(NO) is specified in + DIAGxx, an ordinary ASID is assigned. + - Only applicable when I(state) is C(started), otherwise ignored. + required: false + type: bool + state: + description: + - I(state) should be the desired state of the started task after the module is executed. + - If I(state) is C(started) and the respective member is not present on the managed node, then error will be thrown with C(rc=1), + C(changed=false) and I(stderr) which contains error details. + - If I(state) is C(cancelled), C(modified), C(displayed), C(stopped) or C(forced) and the started task is not running on the managed node, + then error will be thrown with C(rc=1), C(changed=false) and I(stderr) contains error details. + - If I(state) is C(displayed) and the started task is running, then the module will return the started task details along with + C(changed=true). + required: True + type: str + choices: + - started + - displayed + - modified + - cancelled + - stopped + - forced + subsystem: + description: + - The name of the subsystem that selects the task for processing. The name must be 1-4 + characters long, which are defined in the IEFSSNxx parmlib member, and the subsystem must + be active. + - Only applicable when I(state) is C(started), otherwise ignored. + required: false + type: str + task_id: + description: + - The started task id starts with STC. + - Only applicable when I(state) is C(displayed), C(modified), C(cancelled), C(stopped), or C(forced), otherwise ignored. + required: false + type: str +# tcb_address: +# description: +# - 6-digit hexadecimal TCB address of the task to terminate. +# - Only applicable when I(state) is C(forced), otherwise ignored. 
+# required: false +# type: str +# volume: +# description: +# - If I(device_type) is a tape or direct-access device, the serial number of the volume, +# mounted on the device. +# - Only applicable when I(state) is C(started), otherwise ignored. +# required: false +# type: str + userid: + description: + - The user ID of the time-sharing user you want to cancel or force. + - Only applicable when I(state) is C(cancelled) or C(forced), otherwise ignored. + required: false + type: str + verbose: + description: + - When C(verbose=true), the module will return system logs that describe the task's execution. + This option can return a big response depending on system load, also it could surface other + program's activity. + required: false + type: bool + default: false + wait_time: + description: + - Total time that the module will wait for a submitted task, measured in seconds. + The time begins when the module is executed on the managed node. Default value of 0 means to wait the default + amount of time supported by the opercmd utility. + required: false + default: 0 + type: int + +attributes: + action: + support: none + description: Indicates this has a corresponding action plugin so some parts of the options can be executed on the controller. + async: + support: full + description: Supports being used with the ``async`` keyword. + check_mode: + support: full + description: Can run in check_mode and return changed status prediction without modifying target. If not supported, the action will be skipped. +""" +EXAMPLES = r""" +- name: Start a started task using a member in a partitioned data set. + zos_started_task: + state: "started" + member: "PROCAPP" +- name: Start a started task using a member name and giving it an identifier. + zos_started_task: + state: "started" + member: "PROCAPP" + identifier: "SAMPLE" +- name: Start a started task using both a member and a job name. 
+ zos_started_task: + state: "started" + member: "PROCAPP" + job_name: "SAMPLE" +- name: Start a started task and enable verbose output. + zos_started_task: + state: "started" + member: "PROCAPP" + job_name: "SAMPLE" + verbose: True +- name: Start a started task specifying the subsystem and enabling a reusable ASID. + zos_started_task: + state: "started" + member: "PROCAPP" + subsystem: "MSTR" + reus_asid: "YES" +- name: Display a started task using a started task name. + zos_started_task: + state: "displayed" + task_name: "PROCAPP" +- name: Display a started task using a started task id. + zos_started_task: + state: "displayed" + task_id: "STC00012" +- name: Display all started tasks that begin with an s using a wildcard. + zos_started_task: + state: "displayed" + task_name: "s*" +- name: Display all started tasks. + zos_started_task: + state: "displayed" + task_name: "all" +- name: Cancel a started task using task name. + zos_started_task: + state: "cancelled" + task_name: "SAMPLE" +- name: Cancel a started task using a started task id. + zos_started_task: + state: "cancelled" + task_id: "STC00093" +- name: Cancel a started task using it's task name and ASID. + zos_started_task: + state: "cancelled" + task_name: "SAMPLE" + asidx: 0014 +- name: Modify a started task's parameters. + zos_started_task: + state: "modified" + task_name: "SAMPLE" + parameters: ["XX=12"] +- name: Modify a started task's parameters using a started task id. + zos_started_task: + state: "modified" + task_id: "STC00034" + parameters: ["XX=12"] +- name: Stop a started task using it's task name. + zos_started_task: + state: "stopped" + task_name: "SAMPLE" +- name: Stop a started task using a started task id. + zos_started_task: + state: "stopped" + task_id: "STC00087" +- name: Stop a started task using it's task name, identifier and ASID. + zos_started_task: + state: "stopped" + task_name: "SAMPLE" + identifier: "SAMPLE" + asidx: 00A5 +- name: Force a started task using it's task name. 
+ zos_started_task: + state: "forced" + task_name: "SAMPLE" +- name: Force a started task using it's task id. + zos_started_task: + state: "forced" + task_id: "STC00065" +""" + +RETURN = r""" +changed: + description: + - True if the state was changed, otherwise False. + returned: always + type: bool +cmd: + description: + - Command executed via opercmd. + returned: changed + type: str + sample: S SAMPLE +msg: + description: + - Failure or skip message returned by the module. + returned: failure or skipped + type: str + sample: Command parameters are invalid. +rc: + description: + - The return code is 0 when command executed successfully. + - The return code is 1 when opercmd throws any error. + - The return code is 4 when task_id format is invalid. + - The return code is 5 when any parameter validation failed. + - The return code is 8 when started task is not found using task_id. + returned: changed + type: int + sample: 0 +state: + description: + - The final state of the started task, after execution. + returned: success + type: str + sample: S SAMPLE +stderr: + description: + - The STDERR from the command, may be empty. + returned: failure + type: str + sample: An error has occurred. +stderr_lines: + description: + - List of strings containing individual lines from STDERR. + returned: failure + type: list + sample: ["An error has occurred"] +stdout: + description: + - The STDOUT from the command, may be empty. + returned: success + type: str + sample: ISF031I CONSOLE OMVS0000 ACTIVATED. +stdout_lines: + description: + - List of strings containing individual lines from STDOUT. + returned: success + type: list + sample: ["Allocation to SYSEXEC completed."] +tasks: + description: + - The output information for a list of started tasks matching specified criteria. + - If no started task is found then this will return empty. + returned: success + type: list + elements: dict + contains: + asidx: + description: + - Address space identifier (ASID), in hexadecimal. 
+ type: str + sample: 0054 + cpu_time: + description: + - The processor time used by the address space, including the initiator. This time does not include SRB time. + - I(cpu_time) format is hhhhh.mm.ss.SSS(hours.minutes.seconds.milliseconds). + - C(********) when time exceeds 100000 hours. + - C(NOTAVAIL) when the TOD clock is not working. + type: str + sample: 00000.00.00.003 + elapsed_time: + description: + - The elapsed time since the started task started. + - I(elapsed_time) format is hhhhh.mm.ss.SSS(hours.minutes.seconds.milliseconds). + - C(********) when time exceeds 100000 hours. + - C(NOTAVAIL) when the TOD clock is not working. + type: str + sample: 00003.20.23.013 + started_time: + description: + - The time when the started task started. + - C(********) when time exceeds 100000 hours. + - C(NOTAVAIL) when the TOD clock is not working. + type: str + sample: "2025-09-11 18:21:50.293644+00:00" + task_id: + description: + - The started task id. + type: str + sample: STC00018 + task_identifier: + description: + - The name of a system address space. + - The name of a step, for a job or attached APPC transaction program attached by an initiator. + - The identifier of a task created by the START command. + - The name of a step that called a cataloged procedure. + - C(STARTING) if initiation of a started job, system task, or attached APPC transaction program is incomplete. + - C(*MASTER*) for the master address space. + - The name of an initiator address space. + type: str + sample: SPROC + task_name: + description: + - The name of the started task. + type: str + sample: SAMPLE +verbose_output: + description: + - If C(verbose=true), the system logs related to the started task executed state will be shown. + returned: success + type: str + sample: NC0000000 ZOSMACHINE 25240 12:40:30.15 OMVS0000 00000210.... 
+""" + +from ansible.module_utils.basic import AnsibleModule +import traceback +import re +from datetime import datetime, timedelta +import re +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( + better_arg_parser +) +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import ( + ZOAUImportError +) + +try: + from zoautil_py import opercmd, zsystem, jobs +except ImportError: + opercmd = ZOAUImportError(traceback.format_exc()) + zsystem = ZOAUImportError(traceback.format_exc()) + jobs = ZOAUImportError(traceback.format_exc()) + + +def execute_command(operator_cmd, started_task_name, asidx, execute_display_before=False, timeout_s=0, **kwargs): + """Execute operator command. + + Parameters + ---------- + operator_cmd : str + Operator command. + started_task_name : str + Name of the started task. + asidx : string + The HEX address space identifier. + execute_display_before: bool + Indicates whether display command needs to be executed before actual command or not. + timeout_s : int + Timeout to wait for the command execution, measured in seconds. + **kwargs : dict + More arguments for the command. + + Returns + ------- + tuple + Tuple containing the RC, standard out, standard err of the + query script and started task parameters. + """ + task_params = [] + # as of ZOAU v1.3.0, timeout is measured in centiseconds, therefore: + timeout_c = 100 * timeout_s + if execute_display_before: + task_params = execute_display_command(started_task_name, asidx) + response = opercmd.execute(operator_cmd, timeout_c, **kwargs) + + rc = response.rc + stdout = response.stdout_response + stderr = response.stderr_response + return rc, stdout, stderr, task_params + + +def execute_display_command(started_task_name, asidx=None, task_params_before=None, timeout=0): + """Execute operator display command. + + Parameters + ---------- + started_task_name : str + Name of the started task. + asidx : string + The HEX address space identifier. 
+ task_params_before: list + List of started task details which have same started task name. + timeout : int + Timeout to wait for the command execution, measured in centiseconds. + + Returns + ------- + list + List contains extracted parameters from display command output of started task + """ + cmd = f"d a,{started_task_name}" + display_response = opercmd.execute(cmd, timeout) + task_params = [] + if display_response.rc == 0 and display_response.stderr_response == "": + task_params = extract_keys(display_response.stdout_response, asidx, task_params_before) + return task_params + + +def validate_and_prepare_start_command(module): + """Validates parameters and creates start command + + Parameters + ---------- + module : dict + The started task start command parameters. + + Returns + ------- + started_task_name + The name of started task. + cmd + The start command in string format. + """ + member = module.params.get('member_name') + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + job_account = module.params.get('job_account') + parameters = module.params.get('parameters', []) + device_type = module.params.get('device_type') or "" + device_number = module.params.get('device_number') or "" + volume_serial = module.params.get('volume') or "" + subsystem_name = module.params.get('subsystem') + reus_asid = '' + if module.params.get('reus_asid') is not None: + if module.params.get('reus_asid'): + reus_asid = 'YES' + else: + reus_asid = 'NO' + keyword_parameters = module.params.get('keyword_parameters') + keyword_parameters_string = "" + device = device_type if device_type else device_number + # Validations + if job_account and len(job_account) > 55: + module.fail_json( + rc=5, + msg="The length of job_account exceeded 55 characters.", + changed=False + ) + if device_number: + devnum_len = len(device_number) + if devnum_len not in (3, 5) or (devnum_len == 5 and not device_number.startswith("/")): + module.fail_json( + rc=5, + 
msg="device_number should be 3 or 4 characters long and preceded by / when it is 4 characters long.", + changed=False + ) + if subsystem_name and len(subsystem_name) > 4: + module.fail_json( + rc=5, + msg="The subsystem_name must be 1-4 characters long.", + changed=False + ) + if keyword_parameters: + for key, value in keyword_parameters.items(): + key_len = len(key) + value_len = len(value) + if key_len > 44 or value_len > 44 or key_len + value_len > 65: + module.fail_json( + rc=5, + msg="The length of a keyword=option exceeded 66 characters or length of an individual value exceeded 44 characters." + + "key:{0}, value:{1}".format(key, value), + changed=False + ) + else: + if keyword_parameters_string: + keyword_parameters_string = f"{keyword_parameters_string},{key}={value}" + else: + keyword_parameters_string = f"{key}={value}" + if job_name: + started_task_name = f"{job_name}.{job_name}" + elif member: + started_task_name = member + if identifier: + started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" + else: + module.fail_json( + rc=5, + msg="member_name is missing which is mandatory to start a started task.", + changed=False + ) + if not member: + module.fail_json( + rc=5, + msg="member_name is missing which is mandatory to start a started task.", + changed=False + ) + if job_name and identifier: + module.fail_json( + rc=5, + msg="job_name and identifier_name are mutually exclusive while starting a started task.", + changed=False + ) + parameters_updated = "" + if parameters: + if len(parameters) == 1: + parameters_updated = f"'{parameters[0]}'" + else: + parameters_updated = f"({','.join(parameters)})" + + cmd = f"S {member}" + if identifier: + cmd = f"{cmd}.{identifier}" + if parameters: + cmd = f"{cmd},{device},{volume_serial},{parameters_updated}" + elif volume_serial: + cmd = f"{cmd},{device},{volume_serial}" + elif device: + cmd = f"{cmd},{device}" + if job_name: + cmd = 
f"{cmd},JOBNAME={job_name}" + if job_account: + cmd = f"{cmd},JOBACCT={job_account}" + if subsystem_name: + cmd = f"{cmd},SUB={subsystem_name}" + if reus_asid: + cmd = f"{cmd},REUSASID={reus_asid}" + if keyword_parameters_string: + cmd = f"{cmd},{keyword_parameters_string}" + return started_task_name, cmd + + +def fetch_task_name_and_asidx(module, task_id): + """Executes JLS command and fetches task name + + Parameters + ---------- + module : dict + The started task display command parameters. + task_id : str + The started task id starts with STC. + + Returns + ------- + task_name + The name of started task. + """ + try: + task_details = jobs.fetch(task_id) + if not isinstance(task_details, jobs.Job): + module.fail_json( + rc=1, + msg=f"Fetching started task details using task_id: {task_id} is failed", + changed=False + ) + except Exception as err: + module.fail_json( + rc=err.response.rc, + msg=f"Fetching started task details using task_id: {task_id} is failed with ZOAU error: {err.response.stderr_response}", + changed=False + ) + task_name = task_details.name + asidx = f"{task_details.asid:04X}" + return task_name, asidx + + +def prepare_display_command(module): + """Validates parameters and creates display command + + Parameters + ---------- + module : dict + The started task display command parameters. + + Returns + ------- + started_task_name + The name of started task. + asidx + The address space identifier value, in hexadecimal. + cmd + The display command in string format. 
+ """ + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + task_id = module.params.get('task_id') + started_task_name = "" + task_name = asidx = "" + if task_id: + task_name, asidx = fetch_task_name_and_asidx(module, task_id) + if task_name: + started_task_name = task_name + elif job_name: + started_task_name = job_name + if identifier: + started_task_name = f"{started_task_name}.{identifier}" + else: + module.fail_json( + rc=5, + msg="either job_name or task_id is mandatory to display started task details.", + changed=False + ) + cmd = f"D A,{started_task_name}" + return started_task_name, asidx, cmd + + +def prepare_stop_command(module, started_task=None, asidx=None, duplicate_tasks=False): + """Validates parameters and creates stop command + + Parameters + ---------- + module : dict + The started task stop command parameters. + started_task: string + The started task name. + asidx : string + The address space identifier value, in hexadecimal. + duplicate_tasks: bool + Indicates if duplicate tasks are running. + + Returns + ------- + started_task_name + The name of started task. + cmd + The stop command in string format. 
+ """ + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + asidx = module.params.get('asidx') or asidx + started_task_name = "" + if started_task: + started_task_name = started_task + elif job_name: + started_task_name = job_name + if identifier: + started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" + else: + module.fail_json( + rc=5, + msg="either job_name or task_id is mandatory to stop a running started task.", + changed=False + ) + cmd = f"P {started_task_name}" + if asidx or duplicate_tasks: + cmd = f"{cmd},A={asidx}" + return started_task_name, cmd + + +def prepare_modify_command(module, started_task=None): + """Validates parameters and creates modify command + + Parameters + ---------- + module : dict + The started task modify command parameters. + started_task: string + The started task name. + + Returns + ------- + started_task_name + The name of started task. + cmd + The modify command in string format. 
+ """ + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + parameters = module.params.get('parameters') + started_task_name = "" + if started_task: + started_task_name = started_task + elif job_name: + started_task_name = job_name + if identifier: + started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" + else: + module.fail_json( + rc=5, + msg="either job_name or task_id is mandatory to modify a running started task.", + changed=False + ) + if parameters is None: + module.fail_json( + rc=5, + msg="parameters are mandatory while modifying a started task.", + changed=False + ) + cmd = f"F {started_task_name},{','.join(parameters)}" + return started_task_name, cmd + + +def prepare_cancel_command(module, started_task=None, asidx=None, duplicate_tasks=False): + """Validates parameters and creates cancel command + + Parameters + ---------- + module : dict + The started task modify command parameters. + started_task: string + The started task name. + asidx : string + The address space identifier value, in hexadecimal. + duplicate_tasks: bool + Indicates if duplicate tasks are running. + + Returns + ------- + started_task_name + The name of started task. + cmd + The cancel command in string format. 
+ """ + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + asidx = module.params.get('asidx') or asidx + dump = module.params.get('dump') + armrestart = module.params.get('armrestart') + userid = module.params.get('userid') + started_task_name = "" + if started_task: + started_task_name = started_task + elif job_name: + started_task_name = job_name + if identifier: + started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" + elif userid: + started_task_name = f"U={userid}" + else: + module.fail_json( + rc=5, + msg="job_name, task_id and userid are missing, one of them is needed to cancel a task.", + changed=False + ) + if userid and armrestart: + module.fail_json( + rc=5, + msg="The ARMRESTART parameter is not valid with the U=userid parameter.", + changed=False + ) + cmd = f"C {started_task_name}" + if asidx or duplicate_tasks: + cmd = f"{cmd},A={asidx}" + if dump: + cmd = f"{cmd},DUMP" + if armrestart: + cmd = f"{cmd},ARMRESTART" + return started_task_name, cmd + + +def prepare_force_command(module, started_task=None, asidx=None, duplicate_tasks=False): + """Validates parameters and creates force command + + Parameters + ---------- + module : dict + The started task force command parameters. + started_task: string + The started task name. + asidx : string + The address space identifier value, in hexadecimal. + duplicate_tasks: bool + Indicates if duplicate tasks are running. + + Returns + ------- + started_task_name + The name of started task. + cmd + The force command in string format. 
+ """ + identifier = module.params.get('identifier_name') + job_name = module.params.get('job_name') + asidx = module.params.get('asidx') or asidx + arm = module.params.get('arm') + armrestart = module.params.get('armrestart') + userid = module.params.get('userid') + tcb_address = module.params.get('tcb_address') + retry = '' + if module.params.get('retry_force') is not None: + if module.params.get('retry_force'): + retry = 'YES' + else: + retry = 'NO' + started_task_name = "" + if tcb_address and len(tcb_address) != 6: + module.fail_json( + rc=5, + msg="The TCB address of the task should be exactly 6-digit hexadecimal.", + changed=False + ) + if userid and armrestart: + module.fail_json( + rc=5, + msg="The ARMRESTART parameter is not valid with the U=userid parameter.", + changed=False + ) + if started_task: + started_task_name = started_task + elif job_name: + started_task_name = job_name + if identifier: + started_task_name = f"{started_task_name}.{identifier}" + else: + started_task_name = f"{started_task_name}.{started_task_name}" + elif userid: + started_task_name = f"U={userid}" + else: + module.fail_json( + rc=5, + msg="job_name, task_id and userid are missing, one of them is needed to force stop a running started task.", + changed=False + ) + cmd = f"FORCE {started_task_name}" + if asidx or duplicate_tasks: + cmd = f"{cmd},A={asidx}" + if arm: + cmd = f"{cmd},ARM" + if armrestart: + cmd = f"{cmd},ARMRESTART" + if tcb_address: + cmd = f"{cmd},TCB={tcb_address}" + if retry: + cmd = f"{cmd},RETRY={retry}" + return started_task_name, cmd + + +def extract_keys(stdout, asidx=None, task_params_before=None): + """Extracts keys and values from the given stdout + + Parameters + ---------- + stdout : string + The started task display command output + asidx : string + The address space identifier value, in hexadecimal. + task_params_before: list + List of started task details which have same started task name. 
+ + Returns + ------- + tasks + The list of task parameters. + """ + keys = { + 'A': 'asidx', + 'CT': 'cpu_time', + 'ET': 'elapsed_time', + 'WUID': 'task_id' + } + lines = stdout.strip().split('\n') + tasks = [] + current_task = {} + task_header_regex = re.compile(r'^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)') + kv_pattern = re.compile(r'(\S+)=(\S+)') + for line in lines[5:]: + line = line.strip() + match_firstline = task_header_regex.search(line) + if len(line.split()) >= 5 and match_firstline: + if current_task: + current_task['started_time'] = "" + el_time = current_task.get('elapsed_time') + if el_time: + current_task['elapsed_time'] = convert_cpu_time(el_time) or current_task['elapsed_time'] + current_task['started_time'] = calculate_start_time(el_time) + if asidx: + if asidx == current_task.get('asidx'): + tasks.append(current_task) + current_task = {} + break + elif task_params_before: + current_asid = current_task.get('asidx') + task_exists_before = False + for task in task_params_before: + if task.get('asidx') == current_asid: + task_exists_before = True + break + if not task_exists_before: + tasks.append(current_task) + else: + tasks.append(current_task) + current_task = {} + current_task['task_name'] = match_firstline.group(1) + current_task['task_identifier'] = match_firstline.group(2) + for match in kv_pattern.finditer(line): + key, value = match.groups() + if key in keys: + key = keys[key] + current_task[key.lower()] = value + elif current_task: + for match in kv_pattern.finditer(line): + key, value = match.groups() + if key in keys: + key = keys[key] + current_task[key.lower()] = value + if current_task: + current_task['started_time'] = "" + el_time = current_task.get('elapsed_time') + if el_time: + current_task['elapsed_time'] = convert_cpu_time(el_time) or current_task['elapsed_time'] + current_task['started_time'] = calculate_start_time(el_time) + cpu_time = current_task.get('cpu_time') + if cpu_time: + current_task['cpu_time'] = 
convert_cpu_time(cpu_time) or current_task['cpu_time'] + if asidx: + if asidx == current_task.get('asidx'): + tasks.append(current_task) + elif task_params_before: + current_asid = current_task.get('asidx') + task_exists_before = False + for task in task_params_before: + if task.get('asidx') == current_asid: + task_exists_before = True + break + if not task_exists_before: + tasks.append(current_task) + else: + tasks.append(current_task) + return tasks + + +def parse_time(ts_str): + """Parse timestamp + + Parameters + ---------- + ts_str : string + The time stamp in string format + + Returns + ------- + timestamp + Transformed timestamp + """ + try: + # Case 1: Duration like "000.005seconds" + sec_match = re.match(r"^(\d+\.?\d*)\s*S?$", ts_str, re.IGNORECASE) + if sec_match: + return timedelta(seconds=float(sec_match.group(1))) + # Case 2: hh.mm.ss + hms_match = re.match(r"^(\d+).(\d{2}).(\d{2})$", ts_str) + if hms_match: + h, m, s = map(int, hms_match.groups()) + return timedelta(hours=h, minutes=m, seconds=s) + # Case 3: hhhhh.mm + hm_match = re.match(r"^(\d{1,5}).(\d{2})$", ts_str) + if hm_match: + h, m = map(int, hm_match.groups()) + return timedelta(hours=h, minutes=m) + except Exception: + return "" + return "" + + +def calculate_start_time(ts_str): + now = datetime.now().astimezone() + parsed = parse_time(ts_str) + if parsed is None: + return "" + # If it's a timedelta (duration), subtract from now → absolute datetime + if isinstance(parsed, timedelta): + return f"{now - parsed}" + return "" + + +def convert_cpu_time(ts_str): + parsed = parse_time(ts_str) + if parsed is None: + return "" + # If it's a timedelta (duration), subtract from now → absolute datetime + if isinstance(parsed, timedelta): + total_seconds = int(parsed.total_seconds()) + milliseconds = int(parsed.microseconds / 1000) + + hours = total_seconds // 3600 + minutes = (total_seconds % 3600) // 60 + seconds = total_seconds % 60 + + # Format: HHHHH.MM.SS.SSS + return 
f"{hours:05}.{minutes:02}.{seconds:02}.{milliseconds:03}" + return "" + + +def fetch_logs(command, timeout): + """Fetch the system console (SYSLOG) messages related to the given command + + Parameters + ---------- + command : string + The command which needs to be checked in system logs + timeout: int + The timeout value passed in input. + + Returns + ------- + str + Logs from SYSLOG + """ + time_mins = timeout // 60 + 1 + option = '-t' + str(time_mins) + stdout = zsystem.read_console(options=option) + stdout_lines = stdout.splitlines() + first = None + pattern = rf"\b{command}\b" + for i, line in enumerate(stdout_lines): + if re.search(pattern, line, re.IGNORECASE): + first = i + # NOTE(review): 'if not first' also treats a match at index 0 as "no match"; + # consider 'if first is None' — TODO confirm intended behavior. + if not first: + return "" + logs = "\n".join(stdout_lines[first:]) + return logs + + +def run_module(): + """Initialize the module. + + Raises + ------ + fail_json + z/OS started task operation failed. + + Note: + 5 arguments (device_number, device_type, volume, retry_force, tcb_address) are commented out because + those values have not been tested in positive scenarios. These options will be enabled after successful + testing. Below Git issues are created to track this. 
+ https://github.com/ansible-collections/ibm_zos_core/issues/2339 + https://github.com/ansible-collections/ibm_zos_core/issues/2340 + """ + module = AnsibleModule( + argument_spec={ + 'state': { + 'type': 'str', + 'required': True, + 'choices': ['started', 'stopped', 'modified', 'displayed', 'forced', 'cancelled'] + }, + 'arm': { + 'type': 'bool', + 'required': False + }, + 'armrestart': { + 'type': 'bool', + 'required': False + }, + 'asidx': { + 'type': 'str', + 'required': False + }, + # 'device_number': { + # 'type': 'str', + # 'required': False + # }, + # 'device_type': { + # 'type': 'str', + # 'required': False + # }, + 'dump': { + 'type': 'bool', + 'required': False + }, + 'identifier_name': { + 'type': 'str', + 'required': False, + 'aliases': ['identifier'] + }, + 'job_account': { + 'type': 'str', + 'required': False + }, + 'job_name': { + 'type': 'str', + 'required': False, + 'aliases': ['job', 'task_name', 'task'] + }, + 'keyword_parameters': { + 'type': 'dict', + 'required': False, + 'no_log': False + }, + 'member_name': { + 'type': 'str', + 'required': False, + 'aliases': ['member'] + }, + 'parameters': { + 'type': 'list', + 'elements': 'str', + 'required': False + }, + # 'retry_force': { + # 'type': 'bool', + # 'required': False + # }, + 'reus_asid': { + 'type': 'bool', + 'required': False + }, + 'subsystem': { + 'type': 'str', + 'required': False + }, + 'task_id': { + 'type': 'str', + 'required': False + }, + # 'tcb_address': { + # 'type': 'str', + # 'required': False + # }, + 'userid': { + 'type': 'str', + 'required': False + }, + 'verbose': { + 'type': 'bool', + 'required': False, + 'default': False + }, + # 'volume': { + # 'type': 'str', + # 'required': False + # }, + 'wait_time': { + 'type': 'int', + 'required': False, + 'default': 0 + } + }, + mutually_exclusive=[ + # ['device_number', 'device_type'], + ['job_name', 'task_id'], + ['identifier_name', 'task_id'] + ], + # required_by={'retry_force': ['tcb_address']}, + supports_check_mode=True + ) + 
+ args_def = { + 'state': { + 'arg_type': 'str', + 'required': True + }, + 'arm': { + 'arg_type': 'bool', + 'required': False + }, + 'armrestart': { + 'arg_type': 'bool', + 'required': False + }, + 'asidx': { + 'arg_type': 'str', + 'required': False + }, + # 'device_number': { + # 'arg_type': 'str', + # 'required': False + # }, + # 'device_type': { + # 'arg_type': 'str', + # 'required': False + # }, + 'dump': { + 'arg_type': 'bool', + 'required': False + }, + 'identifier_name': { + 'arg_type': 'identifier_name', + 'required': False, + 'aliases': ['identifier'] + }, + 'job_account': { + 'arg_type': 'str', + 'required': False + }, + 'job_name': { + 'arg_type': 'str', + 'required': False, + 'aliases': ['job', 'task_name', 'task'] + }, + 'keyword_parameters': { + 'arg_type': 'basic_dict', + 'required': False + }, + 'member_name': { + 'arg_type': 'member_name', + 'required': False, + 'aliases': ['member'] + }, + 'parameters': { + 'arg_type': 'list', + 'elements': 'str', + 'required': False + }, + # 'retry_force': { + # 'arg_type': 'bool', + # 'required': False + # }, + 'reus_asid': { + 'arg_type': 'bool', + 'required': False + }, + 'subsystem': { + 'arg_type': 'str', + 'required': False + }, + 'task_id': { + 'type': 'str', + 'required': False + }, + # 'tcb_address': { + # 'arg_type': 'str', + # 'required': False + # }, + 'userid': { + 'arg_type': 'str', + 'required': False + }, + 'verbose': { + 'arg_type': 'bool', + 'required': False + }, + # 'volume': { + # 'arg_type': 'str', + # 'required': False + # }, + 'wait_time': { + 'arg_type': 'int', + 'required': False + } + } + + try: + parser = better_arg_parser.BetterArgParser(args_def) + parsed_args = parser.parse_args(module.params) + module.params = parsed_args + except ValueError as err: + module.fail_json( + msg='Parameter verification failed.', + stderr=str(err) + ) + state = module.params.get('state') + userid = module.params.get('userid') + wait_time_s = module.params.get('wait_time') + verbose = 
module.params.get('verbose') + kwargs = {} + # Fetch started task name if task_id is present in the request + task_id = module.params.get('task_id') + task_name = "" + asidx = module.params.get('asidx') + duplicate_tasks = False + started_task_name_from_id = "" + task_info = [] + if task_id and state != "displayed" and state != "started": + task_name, asidx = fetch_task_name_and_asidx(module, task_id) + task_params = execute_display_command(task_name) + if len(task_params) > 1: + duplicate_tasks = True + for task in task_params: + if task['asidx'] == asidx: + task_info.append(task) + started_task_name_from_id = f"{task['task_name']}.{task['task_identifier']}" + if not started_task_name_from_id: + module.fail_json( + rc=1, + msg="Started task of the given task_id is not active.", + changed=False + ) + """ + Below error messages or error codes are used to determine if response has any error. + + JCL ERROR - IEE122I: Response contains this keyword when JCL contains syntax error. + INVALID PARAMETER - IEE535I: When invalid parameter passed in command line. + NOT ACTIVE - IEE341I: When started task with the given job name is not active + REJECTED: When modify command is not supported by respective started task. + NOT LOGGED ON - IEE324I: When invalid userid passed in command. + DUPLICATE NAME FOUND - IEE842I: When multiple started tasks exist with same name. + NON-CANCELABLE - IEE838I: When cancel command can't stop job and force command is needed. 
+ CANCELABLE - IEE838I: When force command used without using cancel command + """ + start_errmsg = ['IEE122I', 'IEE535I', 'IEE307I', 'ERROR', 'IEE708I'] + stop_errmsg = ['IEE341I', 'IEE535I', 'IEE708I'] + display_errmsg = ['IEE341I', 'IEE535I', 'NOT FOUND', 'IEE708I'] + modify_errmsg = ['REJECTED', 'IEE341I', 'IEE535I', 'IEE311I', 'IEE708I', 'ISF302E'] + cancel_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'IEE842I', 'NON-CANCELABLE', 'IEE708I'] + force_errmsg = ['IEE341I', 'IEE324I', 'IEE535I', 'CANCELABLE', 'IEE842I', 'IEE708I'] + error_details = { + 'IEE122I': 'Specified member is missing or PROC/JOB contains incorrect JCL statements.', + 'IEE535I': 'A parameter on a command is not valid.', + 'IEE307I': 'Command parameter punctuation is incorrect or parameter is not followed by a blank.', + 'ERROR': 'Member is missing in PROCLIB or JCL is invalid or issue with JCL execution.', + 'NOT FOUND': 'Started task is not active', + 'IEE341I': 'Started task is not active', + 'REJECTED': 'Started task is not accepting modification.', + 'IEE324I': 'The userid specified on the command is not currently active in the system..', + 'IEE842I': 'More than one active job with the specified name exist.', + 'NON-CANCELABLE': 'The task cannot be canceled. Use the FORCE ARM command.', + 'CANCELABLE': 'The task can be canceled. Use the CANCEL command.', + 'IEE311I': 'Required parameter is missing.', + 'IEE708I': 'The value of a keyword specified on a command is incorrect.', + 'ISF302E': 'Parameters are invalid.' 
+ } + err_msg = [] + kwargs = {} + + if wait_time_s: + kwargs.update({"wait": True}) + + cmd = "" + task_params_before = [] + execute_display_before = False + execute_display_after = False + if state == "started": + err_msg = start_errmsg + execute_display_after = True + started_task_name, cmd = validate_and_prepare_start_command(module) + task_params_before = execute_display_command(started_task_name) + elif state == "displayed": + err_msg = display_errmsg + started_task_name, asidx, cmd = prepare_display_command(module) + elif state == "stopped": + if not task_id: + execute_display_before = True + err_msg = stop_errmsg + started_task_name, cmd = prepare_stop_command(module, started_task_name_from_id, asidx, duplicate_tasks) + elif state == "cancelled": + if not userid: + if not task_id: + execute_display_before = True + err_msg = cancel_errmsg + started_task_name, cmd = prepare_cancel_command(module, started_task_name_from_id, asidx, duplicate_tasks) + elif state == "forced": + if not userid: + if not task_id: + execute_display_before = True + err_msg = force_errmsg + started_task_name, cmd = prepare_force_command(module, started_task_name_from_id, asidx, duplicate_tasks) + elif state == "modified": + execute_display_after = True + err_msg = modify_errmsg + started_task_name, cmd = prepare_modify_command(module, started_task_name_from_id) + changed = False + stdout = "" + stderr = "" + rc, out, err, task_params = execute_command(cmd, started_task_name, asidx, execute_display_before, timeout_s=wait_time_s, **kwargs) + is_failed = False + system_logs = "" + msg = "" + # Find failure + found_msg = next((msg for msg in err_msg if msg in out), None) + if err != "" or found_msg: + is_failed = True + # Fetch system logs to validate any error occured in execution + if not is_failed or verbose: + system_logs = fetch_logs(cmd.upper(), wait_time_s) + # If sysout is not having error, then check system log as well to make sure no error occured + if not is_failed: + found_msg 
= next((msg for msg in err_msg if msg in system_logs), None) + if found_msg: + is_failed = True + if not verbose: + system_logs = "" + current_state = "" + if is_failed: + if rc == 0: + rc = 1 + changed = False + msg = error_details.get(found_msg, found_msg) + stdout = out + stderr = err + if err == "" or err is None: + stderr = out + stdout = "" + else: + current_state = state + changed = True + stdout = out + stderr = err + if state == "displayed": + task_params = extract_keys(out, asidx) + elif execute_display_after: + task_params = execute_display_command(started_task_name, asidx, task_params_before) + + result = dict() + + if module.check_mode: + module.exit_json(**result) + + result = dict( + changed=changed, + state=current_state, + cmd=cmd, + tasks=task_info if task_id else task_params, + rc=rc, + stdout=stdout, + stderr=stderr, + stdout_lines=stdout.split('\n'), + stderr_lines=stderr.split('\n'), + verbose_output=system_logs + ) + if msg: + result['msg'] = msg + + module.exit_json(**result) + + +if __name__ == '__main__': + run_module() diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 9a422a2f04..c50850acef 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -206,8 +206,7 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): The command result details. 
""" command_detail_json = [] - delete_on_close = True - tmp_file = NamedTemporaryFile(delete=delete_on_close) + tmp_file = NamedTemporaryFile(delete=True) with open(tmp_file.name, "w") as f: f.write(script) chmod(tmp_file.name, S_IEXEC | S_IREAD | S_IWRITE) @@ -216,9 +215,11 @@ def copy_rexx_and_run_commands(script, commands, module, max_rc): command_results = {} command_results["command"] = command command_results["rc"] = rc - command_results["content"] = stdout.split("\n") - command_results["lines"] = len(command_results.get("content", [])) + command_results["stdout"] = stdout + command_results["stdout_lines"] = stdout.split("\n") + command_results["line_count"] = len(command_results.get("stdout_lines", [])) command_results["stderr"] = stderr + command_results["stderr_lines"] = stderr.split("\n") if rc <= max_rc: command_results["failed"] = False diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 970d789a6b..b8dc1ac2a8 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -46,7 +46,7 @@ type: dict required: true suboptions: - name: + type: description: - The compression format used while archiving. type: str @@ -59,7 +59,7 @@ - terse - xmit - pax - format_options: + options: description: - Options specific to a compression format. type: dict @@ -76,7 +76,7 @@ - When providing the I(xmit_log_data_set) name, ensure there is adequate space. type: str - use_adrdssu: + adrdssu: description: - If set to true, the C(zos_unarchive) module will use Data Facility Storage Management Subsystem data set services @@ -86,7 +86,7 @@ default: False dest_volumes: description: - - When I(use_adrdssu=True), specify the volume the data sets + - When I(adrdssu=True), specify the volume the data sets will be written to. 
- If no volume is specified, storage management rules will be used to determine the volume where the file will be @@ -378,14 +378,14 @@ zos_unarchive: src: "./files/archive_folder_test.tar" format: - name: tar + type: tar # use include - name: Unarchive a bzip file selecting only a file to unpack. zos_unarchive: src: "/tmp/test.bz2" format: - name: bz2 + type: bz2 include: - 'foo.txt' @@ -394,7 +394,7 @@ zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse exclude: - USER.ARCHIVE.TEST1 - USER.ARCHIVE.TEST2 @@ -404,16 +404,16 @@ zos_unarchive: src: "USER.ARCHIVE(0)" format: - name: terse + type: terse # List option - name: List content from XMIT zos_unarchive: src: "USER.ARCHIVE.RESULT.XMIT" format: - name: xmit - format_options: - use_adrdssu: true + type: xmit + options: + adrdssu: true list: true # Encoding example @@ -421,7 +421,7 @@ zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -430,7 +430,7 @@ zos_unarchive: src: "USER.ARCHIVE.RESULT.TRS" format: - name: terse + type: terse encoding: from: IBM-1047 to: ISO8859-1 @@ -522,7 +522,7 @@ def __init__(self, module): Destination of the unarchive. format : str Name of the format of the module. - format_options : list[str] + options : list[str] Options of the format of the module. tmphql : str High level qualifier for temporary datasets. 
@@ -552,8 +552,8 @@ def __init__(self, module): self.module = module self.src = module.params.get("src") self.dest = module.params.get("dest") - self.format = module.params.get("format").get("name") - self.format_options = module.params.get("format").get("format_options") + self.format = module.params.get("format").get("type") + self.options = module.params.get("format").get("options") self.tmphlq = module.params.get("tmp_hlq") self.force = module.params.get("force") self.targets = list() @@ -888,7 +888,7 @@ def __init__(self, module): ---------- volumes : list[str] List of destination volumes. - use_adrdssu : bool + adrdssu : bool Whether to use Data Facility Storage Management Subsystem data set services program ADRDSSU to uncompress data sets or not. dest_dat_set : dict @@ -897,8 +897,8 @@ def __init__(self, module): Source size. """ super(MVSUnarchive, self).__init__(module) - self.volumes = self.format_options.get("dest_volumes") - self.use_adrdssu = self.format_options.get("use_adrdssu") + self.volumes = self.options.get("dest_volumes") + self.adrdssu = self.options.get("adrdssu") self.dest_data_set = module.params.get("dest_data_set") self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set self.source_size = 0 @@ -1115,9 +1115,10 @@ def _restore(self, source): self.clean_environment(data_sets=[source], uss_files=[], remove_targets=True) self.module.fail_json( msg="Failed executing ADRDSSU to unarchive {0}. 
List of data sets not restored : {1}".format(source, unrestored_data_sets), - stdout=out, + stdout=f"command: {restore_cmd} \n stdout:{out}", stderr=err, - stdout_lines=restore_cmd, + stdout_lines=f"command: {restore_cmd} \n stdout:{out}".splitlines(), + stderr_lines=err.splitlines(), rc=rc, ) return rc @@ -1176,7 +1177,7 @@ def extract_src(self): """ temp_ds = "" - if not self.use_adrdssu: + if not self.adrdssu: temp_ds, rc = self._create_dest_data_set(**self.dest_data_set) rc = self.unpack(self.src, temp_ds) self.targets = [temp_ds] @@ -1322,6 +1323,8 @@ def unpack(self, src, dest): msg="Failed executing AMATERSE to restore {0} into {1}".format(src, dest), stdout=out, stderr=err, + stdout_lines=out.splitlines(), + stderr_lines=err.splitlines(), rc=rc, ) return rc @@ -1369,6 +1372,8 @@ def unpack(self, src, dest): msg="Failed executing RECEIVE to restore {0} into {1}".format(src, dest), stdout=out, stderr=err, + stdout_lines=out.splitlines(), + stderr_lines=err.splitlines(), rc=rc, ) return rc @@ -1391,7 +1396,7 @@ def get_unarchive_handler(module): ZipUnarchive The appropriate object type for any other format. 
""" - format = module.params.get("format").get("name") + format = module.params.get("format").get("type") if format in ["tar", "gz", "bz2", "pax"]: return TarUnarchive(module) elif format == "terse": @@ -1594,12 +1599,12 @@ def run_module(): type='dict', required=True, options=dict( - name=dict( + type=dict( type='str', required=True, choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( @@ -1611,7 +1616,7 @@ def run_module(): type='list', elements='str', ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, ) @@ -1698,13 +1703,13 @@ def run_module(): type='dict', required=True, options=dict( - name=dict( + type=dict( type='str', required=True, default='gz', choices=['bz2', 'gz', 'tar', 'zip', 'terse', 'xmit', 'pax'] ), - format_options=dict( + options=dict( type='dict', required=False, options=dict( @@ -1716,7 +1721,7 @@ def run_module(): type='list', elements='str' ), - use_adrdssu=dict( + adrdssu=dict( type='bool', default=False, ), @@ -1724,7 +1729,7 @@ def run_module(): default=dict(xmit_log_data_set=""), ) ), - default=dict(name="", format_options=dict(xmit_log_data_set="")), + default=dict(type="", options=dict(xmit_log_data_set="")), ), dest_data_set=dict( arg_type='dict', diff --git a/tests/conftest.py b/tests/conftest.py index f8ba410d5d..39c8741e1d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,7 @@ __metaclass__ = type import pytest from ibm_zos_core.tests.helpers.ztest import ZTestHelper -from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds, get_volume_and_unit +from ibm_zos_core.tests.helpers.volumes import get_volumes, get_volumes_with_vvds, get_volume_and_unit, get_volumes_sms_mgmt_class from ansible.plugins.action import ActionBase import sys from mock import MagicMock @@ -167,12 +167,30 @@ def volumes_unit_on_systems(ansible_zos_module, request): if path is None: src = 
request.config.getoption("--zinventory-raw") helper = ZTestHelper.from_args(src) - list_volumes = helper.get_volume_and_unit() + list_volumes = helper.get_volumes_list() else: - list_volumes = get_volume_and_unit(ansible_zos_module, path) + list_volumes = get_volume_and_unit(ansible_zos_module) yield list_volumes + +@pytest.fixture(scope="session") +def volumes_sms_systems(ansible_zos_module, request): + """ Call the pytest-ansible plugin to check volumes on the system and work properly a list by session.""" + path = request.config.getoption("--zinventory") + list_volumes = None + + if path is None: + src = request.config.getoption("--zinventory-raw") + helper = ZTestHelper.from_args(src) + list_volumes = helper.get_volumes_list() + else: + list_volumes = get_volumes(ansible_zos_module, path) + + volumes_with_sms = get_volumes_sms_mgmt_class(ansible_zos_module, list_volumes) + yield volumes_with_sms + + # * We no longer edit sys.modules directly to add zoautil_py mock # * because automatic teardown is not performed, leading to mock pollution # * across test files. 
diff --git a/tests/functional/modules/test_zos_apf_func.py b/tests/functional/modules/test_zos_apf_func.py index 8333ce3625..b2c00bc499 100644 --- a/tests/functional/modules/test_zos_apf_func.py +++ b/tests/functional/modules/test_zos_apf_func.py @@ -14,7 +14,6 @@ from __future__ import absolute_import, division, print_function from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name from ibm_zos_core.tests.helpers.volumes import Volume_Handler -from ibm_zos_core.tests.helpers.version import get_zoau_version __metaclass__ = type @@ -48,7 +47,7 @@ def clean_test_env(hosts, test_info): cmd_str = f"drm '{test_info['library']}' " hosts.all.shell(cmd=cmd_str) if test_info.get('persistent'): - cmd_str = f"drm '{test_info['persistent']['data_set_name']}' " + cmd_str = f"drm '{test_info['persistent']['target']}' " hosts.all.shell(cmd=cmd_str) @@ -79,10 +78,14 @@ def test_add_del(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -103,7 +106,7 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): "force_dynamic":True, "tmp_hlq":"", "persistent":{ - "data_set_name":"", + "target":"", "backup":True } } @@ -125,11 +128,15 @@ def test_add_del_with_tmp_hlq_option(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds 
results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 assert result.get("backup_name")[:6] == tmphlq + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -167,11 +174,15 @@ def test_add_del_volume(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] test_info['state'] = 'absent' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): @@ -218,7 +229,7 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): "library":"", "volume":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */"}, "state":"present", "force_dynamic":True @@ -240,13 +251,13 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 add_exptd = ADD_EXPECTED.format(test_info['library'], test_info['volume']) add_exptd = add_exptd.replace(" ", "") - cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + cmd_str = f"cat \"//'{test_info['persistent']['target']}'\" " results 
= hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -256,8 +267,12 @@ def test_add_del_volume_persist(ansible_zos_module, volumes_with_vvds): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] del_exptd = DEL_EXPECTED.replace(" ", "") - cmd_str = f"cat \"//'{test_info['persistent']['data_set_name']}'\" " + cmd_str = f"cat \"//'{test_info['persistent']['target']}'\" " results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -288,7 +303,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): } ], "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -307,10 +322,14 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] add_exptd = ADD_BATCH_EXPECTED.format( test_info['batch'][0]['library'], test_info['batch'][0]['volume'], @@ -320,7 +339,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): test_info['batch'][2]['volume'] ) add_exptd = add_exptd.replace(" ", "") - cmd_str = f"""dcat '{test_info["persistent"]["data_set_name"]}' """ + cmd_str = f"""dcat '{test_info["persistent"]["target"]}' """ results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ 
-330,8 +349,12 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] del_exptd = DEL_EXPECTED.replace(" ", "") - cmd_str = f"""dcat '{test_info["persistent"]["data_set_name"]}' """ + cmd_str = f"""dcat '{test_info["persistent"]["target"]}' """ results = hosts.all.shell(cmd=cmd_str) for result in results.contacted.values(): actual = result.get("stdout") @@ -340,7 +363,7 @@ def test_batch_add_del(ansible_zos_module, volumes_with_vvds): finally: for item in test_info['batch']: clean_test_env(hosts, item) - hosts.all.shell(cmd=f"drm '{test_info['persistent']['data_set_name']}' ") + hosts.all.shell(cmd=f"drm '{test_info['persistent']['target']}' ") def test_operation_list(ansible_zos_module): @@ -351,6 +374,11 @@ def test_operation_list(ansible_zos_module): } results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] list_json = result.get("stdout") data = json.loads(list_json) assert data['format'] in ['DYNAMIC', 'STATIC'] @@ -385,7 +413,7 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds hosts.all.zos_apf(**test_info) ti = { "operation":"list", @@ -394,6 +422,11 @@ def test_operation_list_with_filter(ansible_zos_module, volumes_with_vvds): ti['library'] = "ANSIBLE.*" results = hosts.all.zos_apf(**ti) for result in results.contacted.values(): + 
assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] list_filtered = result.get("stdout") assert test_info['library'] in list_filtered test_info['state'] = 'absent' @@ -433,24 +466,22 @@ def test_add_already_present(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") == '' + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") == [''] # Second call to zos_apf, same as first but with different expectations results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # RC 0 should be allowed for ZOAU >= 1.3.4, - # in zoau < 1.3.4 -i is not recognized in apfadm - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - zoa_version = get_zoau_version(hosts) or "0.0.0.0" - rc = result.get("rc") - if zoa_version >= "1.3.4.0": - assert rc == 0 - elif zoa_version >= "1.2.0.0": - assert rc == 8 - else: - assert rc == 16 + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None test_info['state'] = 'absent' hosts.all.zos_apf(**test_info) finally: @@ -484,21 +515,15 @@ def test_del_not_present(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['state'] = 'absent' results = 
hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # RC 0 should be allowed for ZOAU >= 1.3.4, - # in zoau < 1.3.4 -i is not recognized in apfadm - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 - zoa_version = get_zoau_version(hosts) or "0.0.0.0" - rc = result.get("rc") - if zoa_version >= "1.3.4.0": - assert rc == 0 - elif zoa_version >= "1.2.0.0": - assert rc == 8 - else: - assert rc == 16 + assert result.get("rc") == 0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None finally: clean_test_env(hosts, test_info) @@ -513,7 +538,10 @@ def test_add_not_found(ansible_zos_module): test_info['library'] = f'{TEST_HLQ}.FOO.BAR' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None assert result.get("rc") == 16 or result.get("rc") == 8 @@ -546,11 +574,14 @@ def test_add_with_wrong_volume(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['volume'] = 'T12345' results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): - # Return code 16 if ZOAU < 1.2.0 and RC is 8 if ZOAU >= 1.2.0 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None assert result.get("rc") == 16 or result.get("rc") == 8 finally: clean_test_env(hosts, test_info) @@ -564,7 +595,7 @@ def test_persist_invalid_ds_format(ansible_zos_module, 
volumes_with_vvds): test_info = { "library":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -588,13 +619,17 @@ def test_persist_invalid_ds_format(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds - ds_name = test_info['persistent']['data_set_name'] + test_info['persistent']['target'] = prstds + ds_name = test_info['persistent']['target'] cmd_str =f"decho \"some text to test persistent data_set format validation.\" \"{ds_name}\"" hosts.all.shell(cmd=cmd_str) results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 8 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None finally: clean_test_env(hosts, test_info) @@ -607,7 +642,7 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): test_info = { "library":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -631,11 +666,15 @@ def test_persist_invalid_marker(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['persistent']['marker'] = "# Invalid marker format" results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): assert result.get("rc") == 4 + assert result.get("stdout") is not None + assert result.get("stderr") is not None + assert result.get("stdout_lines") is not None + assert result.get("stderr_lines") is not None finally: clean_test_env(hosts, test_info) @@ -648,7 +687,7 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): test_info = { 
"library":"", "persistent":{ - "data_set_name":"", + "target":"", "marker":"/* {mark} BLOCK */" }, "state":"present", @@ -672,10 +711,11 @@ def test_persist_invalid_marker_len(ansible_zos_module, volumes_with_vvds): prstds = prstds[:30] cmd_str = f"dtouch -tseq '{prstds}' " hosts.all.shell(cmd=cmd_str) - test_info['persistent']['data_set_name'] = prstds + test_info['persistent']['target'] = prstds test_info['persistent']['marker'] = "/* {mark} This is a awfully lo%70sng marker */" % ("o") results = hosts.all.zos_apf(**test_info) for result in results.contacted.values(): + assert result.get("failed") is True assert result.get("msg") == 'marker length may not exceed 72 characters' finally: clean_test_env(hosts, test_info) diff --git a/tests/functional/modules/test_zos_archive_func.py b/tests/functional/modules/test_zos_archive_func.py index 96157390dc..28c7811220 100644 --- a/tests/functional/modules/test_zos_archive_func.py +++ b/tests/functional/modules/test_zos_archive_func.py @@ -131,7 +131,7 @@ def test_uss_single_archive(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -162,7 +162,7 @@ def test_uss_single_archive_with_mode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, mode=dest_mode ) @@ -191,7 +191,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -203,7 +203,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) @@ -216,7 +216,7 @@ def test_uss_single_archive_with_force_option(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, force=True, ) @@ -257,7 +257,7 @@ def 
test_uss_archive_multiple_files(ansible_zos_module, ds_format, path): src=path.get("files"), dest=dest, format={ - "name":ds_format + "type":ds_format }, ) @@ -302,7 +302,7 @@ def test_uss_archive_multiple_files_with_exclude(ansible_zos_module, ds_format, src=path.get("files"), dest=dest, format={ - "name":ds_format + "type":ds_format }, exclude=path.get("exclude") ) @@ -337,7 +337,7 @@ def test_uss_archive_remove_targets(ansible_zos_module, ds_format): src=paths, dest=dest, format={ - "name":ds_format + "type":ds_format }, remove=True ) @@ -367,7 +367,7 @@ def test_uss_archive_encode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, encoding={ "from": TO_ENCODING, @@ -401,7 +401,7 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format }, encoding={ "from": FROM_ENCODING, @@ -432,10 +432,10 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format): # List of tests: # - test_mvs_archive_single_dataset -# - test_mvs_archive_single_dataset_use_adrdssu +# - test_mvs_archive_single_dataset_adrdssu # - test_mvs_archive_single_data_set_remove_target # - test_mvs_archive_multiple_data_sets -# - test_mvs_archive_multiple_data_sets_use_adrdssu +# - test_mvs_archive_multiple_data_sets_adrdssu # - test_mvs_archive_multiple_data_sets_remove_target # - test_mvs_archive_multiple_data_sets_with_exclusion # - test_mvs_archive_multiple_data_sets_with_missing @@ -519,11 +519,11 @@ def test_mvs_archive_single_dataset( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -571,7 +571,7 @@ def test_mvs_archive_single_dataset( 
@pytest.mark.parametrize( "record_format", ["fb", "vb"], ) -def test_mvs_archive_single_dataset_use_adrdssu( +def test_mvs_archive_single_dataset_adrdssu( ansible_zos_module, ds_format, data_set, @@ -617,13 +617,13 @@ def test_mvs_archive_single_dataset_use_adrdssu( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=src_data_set, dest=archive_data_set, @@ -699,11 +699,11 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -719,9 +719,16 @@ def test_mvs_archive_single_data_set_remove_target(ansible_zos_module, ds_format assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = f"dls '{hlq}.*'") + # Changed to using the exact data set name in dls + # because using wildcards would fail. + # Assert archive data set is in place + cmd_result = hosts.all.shell(cmd = f"dls {archive_data_set}") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") - assert src_data_set != c_result.get("stdout") + # Assert src_data_set is removed + cmd_result = hosts.all.shell(cmd = f"dls '{src_data_set}'") + for c_result in cmd_result.contacted.values(): + assert f"BGYSC1103E No datasets match pattern: {src_data_set}." 
in c_result.get("stderr") finally: hosts.all.zos_data_set(name=src_data_set, state="absent") hosts.all.zos_data_set(name=archive_data_set, state="absent") @@ -773,12 +780,12 @@ def test_mvs_archive_multiple_data_sets(ansible_zos_module, ds_format, data_set) hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", dest=archive_data_set, @@ -846,12 +853,12 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) exclude = f"{src_data_set}1" archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", @@ -871,6 +878,7 @@ def test_mvs_archive_multiple_data_sets_with_exclusion(ansible_zos_module, ds_fo assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"dls '{hlq}.*'") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -924,12 +932,12 @@ def test_mvs_archive_multiple_data_sets_and_remove(ansible_zos_module, ds_format hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if 
ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{src_data_set}*", dest=archive_data_set, @@ -1004,12 +1012,12 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form path_list = [ds.get("name") for ds in target_ds_list] format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=path_list, dest=archive_data_set, @@ -1029,6 +1037,7 @@ def test_mvs_archive_multiple_data_sets_with_missing(ansible_zos_module, ds_form assert ds.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"dls '{hlq}.*'") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1092,11 +1101,11 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) @@ -1124,6 +1133,7 @@ def test_mvs_archive_single_dataset_force_lock(ansible_zos_module, ds_format, da assert src_data_set in result.get("archived") cmd_result = hosts.all.shell(cmd = f"dls '{hlq}.*'") + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}'") for c_result in 
cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") @@ -1165,10 +1175,10 @@ def test_gdg_archive(ansible_zos_module, dstype, format): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - format_dict = dict(name=format, format_options=dict()) + format_dict = dict(type=format, options=dict()) if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], dest=archive_data_set, @@ -1179,7 +1189,7 @@ def test_gdg_archive(ansible_zos_module, dstype, format): assert result.get("dest") == archive_data_set assert f"{data_set_name}.G0001V00" in result.get("archived") assert f"{data_set_name}.G0002V00" in result.get("archived") - cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(hlq)) + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}' ") for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") finally: @@ -1208,10 +1218,10 @@ def test_archive_into_gds(ansible_zos_module, dstype, format): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - format_dict = dict(name=format, format_options=dict()) + format_dict = dict(type=format, options=dict()) if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=data_set_name, dest=f"{archive_data_set}(+1)", @@ -1220,9 +1230,9 @@ def test_archive_into_gds(ansible_zos_module, dstype, format): for result in archive_result.contacted.values(): 
assert result.get("changed") is True assert data_set_name in result.get("archived") - cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(hlq)) + cmd_result = hosts.all.shell(cmd = f"dls '{archive_data_set}*' ") for c_result in cmd_result.contacted.values(): - assert archive_data_set in c_result.get("stdout") + assert f"{archive_data_set}.G0001V00" in c_result.get("stdout") finally: hosts.all.shell(cmd=f"drm {hlq}.*") @@ -1289,11 +1299,11 @@ def test_mvs_archive_single_dataset_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=src_data_set, @@ -1364,9 +1374,9 @@ def test_mvs_archive_multiple_dataset_pattern_encoding(ansible_zos_module, ds_fo ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} + format_dict["options"] = {"spack": True} for ds_name in matched_datasets: archive_data_set = get_tmp_ds_name() archive_result = hosts.all.zos_archive( @@ -1438,14 +1448,14 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding(ansible_zos ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} - #skipping some files to encode + format_dict["options"] = {"spack": True} + #skipping some files to encode skip_encoding_list = [matched_datasets[0]] current_encoding_config = encoding.copy() current_encoding_config["skip_encoding"] = skip_encoding_list - + for ds_name in 
matched_datasets: archive_data_set = get_tmp_ds_name() archive_result = hosts.all.zos_archive( @@ -1511,16 +1521,16 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib type="member", state="present" ) - + test_line = "pattern match" for ds_name in all_datasets_to_process: for member in data_set.get("members"): ds_target = f"{ds_name}({member})" if member else ds_name hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"") - format_dict = {"name": ds_format} + format_dict = {"type": ds_format} if ds_format == "terse": - format_dict["format_options"] = {"terse_pack": "spack"} + format_dict["options"] = {"spack": True} for ds_name in matched_datasets: original_hex_result = hosts.all.shell(cmd=f"dcat '{ds_name}' | od -x") @@ -1548,7 +1558,7 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib original_hex.append('*') else: parts = line.split() - if len(parts) > 1: + if len(parts) > 1: original_hex.extend(parts[1:]) reverted_hex = [] @@ -1564,13 +1574,13 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib try: original_first_star_idx = original_hex.index('*') except ValueError: - original_first_star_idx = len(original_hex) + original_first_star_idx = len(original_hex) try: reverted_first_star_idx = reverted_hex.index('*') except ValueError: reverted_first_star_idx = len(reverted_hex) - + original_hex_to_compare = original_hex[:original_first_star_idx] reverted_hex_to_compare = reverted_hex[:reverted_first_star_idx] @@ -1589,4 +1599,4 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansib for ds_name in matched_datasets: hosts.all.zos_data_set(name=ds_name, state="absent") for archive_ds in archived_datasets: - hosts.all.zos_data_set(name=archive_ds, state="absent") \ No newline at end of file + hosts.all.zos_data_set(name=archive_ds, state="absent") diff --git a/tests/functional/modules/test_zos_backup_restore.py 
b/tests/functional/modules/test_zos_backup_restore.py index c25c6f0597..b42bfb94bc 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -27,6 +27,7 @@ import time import json from ibm_zos_core.tests.helpers.utils import get_random_file_name +from ibm_zos_core.tests.helpers.volumes import Volume_Handler DATA_SET_CONTENTS = "HELLO WORLD" TMP_DIRECTORY = "/tmp/" @@ -72,9 +73,9 @@ def create_sequential_data_set_with_contents( hosts, data_set_name, contents, volume=None ): if volume is not None: - results = hosts.all.zos_data_set(name=data_set_name, type="seq", volumes=volume) + results = hosts.all.shell(cmd=f"dtouch -tseq -V{volume} '{data_set_name}'") else: - results = hosts.all.zos_data_set(name=data_set_name, type="seq") + results = hosts.all.shell(cmd=f"dtouch -tseq '{data_set_name}'") assert_module_did_not_fail(results) results = hosts.all.shell("decho '{0}' {1}".format(contents, data_set_name)) assert_module_did_not_fail(results) @@ -85,6 +86,11 @@ def create_file_with_contents(hosts, path, contents): assert_module_did_not_fail(results) +def create_vsam(hosts, data_set_name): + results = hosts.all.shell(cmd=f"dtouch -tksds -k4:0 {data_set_name}") + assert_module_did_not_fail(results) + + def delete_data_set_or_file(hosts, name): if name.startswith("/"): delete_file(hosts, name) @@ -93,7 +99,7 @@ def delete_data_set_or_file(hosts, name): def delete_data_set(hosts, data_set_name): - hosts.all.zos_data_set(name=data_set_name, state="absent") + hosts.all.shell(cmd=f"drm -F '{data_set_name}'") def delete_file(hosts, path): @@ -162,7 +168,6 @@ def assert_data_set_or_file_does_not_exist(hosts, name): def assert_data_set_exists(hosts, data_set_name): results = hosts.all.shell("dls '{0}'".format(data_set_name.upper())) for result in results.contacted.values(): - print(result) found = search( "^{0}$".format(data_set_name), result.get("stdout"), IGNORECASE | MULTILINE ) @@ -926,83 +931,138 @@ def 
test_backup_and_restore_a_data_set_with_same_hlq(ansible_zos_module): delete_remnants(hosts) -# def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module): -# hosts = ansible_zos_module -# try: -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, data_set_name) -# delete_data_set_or_file(hosts, data_set_name2) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) -# create_sequential_data_set_with_contents( -# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME -# ) -# create_sequential_data_set_with_contents( -# hosts, data_set_name2, DATA_SET_CONTENTS, VOLUME2 -# ) -# results = hosts.all.zos_backup_restore( -# operation="backup", -# data_sets=dict(include=DATA_SET_PATTERN), -# volume=VOLUME, -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# ) -# assert_module_did_not_fail(results) -# assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) -# results = hosts.all.zos_backup_restore( -# operation="restore", -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# volume=VOLUME, -# hlq=NEW_HLQ, -# ) -# assert_module_did_not_fail(results) -# assert_data_set_exists(hosts, DATA_SET_RESTORE_LOCATION) -# assert_data_set_does_not_exist(hosts, DATA_SET_RESTORE_LOCATION2) -# finally: -# delete_data_set_or_file(hosts, data_set_name) -# delete_data_set_or_file(hosts, data_set_name2) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION) -# delete_data_set_or_file(hosts, DATA_SET_RESTORE_LOCATION2) -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) +def test_backup_and_restore_of_data_set_from_volume_to_new_volume(ansible_zos_module, volumes_on_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_restore_location = get_tmp_ds_name() + hlqs = "TMPHLQ" + try: + volumes = Volume_Handler(volumes_on_systems) + volume_1 = volumes.get_available_vol() + volume_2 = 
volumes.get_available_vol() + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_restore_location) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume_1 + ) + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + volume=volume_1, + backup_name=data_set_restore_location, + overwrite=True, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_restore_location) + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_restore_location, + overwrite=True, + volume=volume_2, + hlq=hlqs, + ) + assert_module_did_not_fail(results) + assert_data_set_exists(hosts, data_set_restore_location) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_restore_location) + delete_remnants(hosts, hlqs) -# def test_backup_and_restore_of_full_volume(ansible_zos_module): -# hosts = ansible_zos_module -# try: -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, data_set_name) -# create_sequential_data_set_with_contents( -# hosts, data_set_name, DATA_SET_CONTENTS, VOLUME -# ) -# results = hosts.all.zos_backup_restore( -# operation="backup", -# volume=VOLUME, -# full_volume=True, -# sms_storage_class="DB2SMS10", -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# space=500, -# space_type="m", -# ) -# assert_module_did_not_fail(results) -# assert_data_set_or_file_exists(hosts, DATA_SET_BACKUP_LOCATION) -# delete_data_set_or_file(hosts, data_set_name) -# results = hosts.all.zos_backup_restore( -# operation="restore", -# backup_name=DATA_SET_BACKUP_LOCATION, -# overwrite=True, -# volume=VOLUME, -# full_volume=True, -# sms_storage_class="DB2SMS10", -# space=500, -# space_type="m", -# ) -# assert_module_did_not_fail(results) -# assert_data_set_exists_on_volume(hosts, data_set_name, VOLUME) -# finally: -# 
delete_data_set_or_file(hosts, data_set_name) -# delete_data_set_or_file(hosts, DATA_SET_BACKUP_LOCATION) +def test_backup_and_restore_of_sms_group(ansible_zos_module, volumes_sms_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + volumes = Volume_Handler(volumes_sms_systems) + volume, smsgrp = volumes.get_available_vol_with_sms() + delete_data_set_or_file(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume + ) + sms = {"storage_class":smsgrp} + results = hosts.all.zos_backup_restore( + data_sets=dict(include=data_set_name), + operation="backup", + volume=volume, + backup_name=data_set_backup_location, + overwrite=True, + sms=sms, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + sms = { + "disable_automatic_class":[data_set_name], + "disable_automatic_storage_class":True + } + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + overwrite=True, + volume=volume, + sms=sms, + ) + assert_module_did_not_fail(results) + assert_data_set_exists_on_volume(hosts, data_set_name, volume) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) + + +def test_backup_and_restore_all_of_sms_group(ansible_zos_module, volumes_sms_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + volumes = Volume_Handler(volumes_sms_systems) + volume, smsgrp = volumes.get_available_vol_with_sms() + delete_data_set_or_file(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume + ) + sms 
= {"storage_class":smsgrp} + for attempt in range(2): + results = hosts.all.zos_backup_restore( + data_sets=dict(include=data_set_name), + operation="backup", + volume=volume, + backup_name=data_set_backup_location, + overwrite=True, + sms=sms, + ) + for result in results.contacted.values(): + if result.get("failed", False) is not True: + break + else: + if smsgrp == "PRIMARY": + sms = {"storage_class":"DB2SMS10"} + else: + sms = {"storage_class":"PRIMARY"} + sc = sms["storage_class"] + if sc not in {"DB2SMS10", "PRIMARY"}: + pytest.skip(f"Skipping test: unsupported storage_class {sc}") + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + sms = { + "disable_automatic_class":['**'], + "disable_automatic_storage_class":True + } + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + overwrite=True, + volume=volume, + sms=sms, + ) + assert_module_did_not_fail(results) + assert_data_set_exists_on_volume(hosts, data_set_name, volume) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) @pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) @@ -1012,15 +1072,15 @@ def test_backup_gds(ansible_zos_module, dstype): # We need to replace hyphens because of NAZARE-10614: dzip fails archiving data set names with '-' data_set_name = get_tmp_ds_name(symbols=True).replace("-", "") backup_dest = get_tmp_ds_name(symbols=True).replace("-", "") - results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} 
'{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -1048,15 +1108,15 @@ def test_backup_into_gds(ansible_zos_module, dstype): # We need to replace hyphens because of NAZARE-10614: dzip fails archiving data set names with '-' data_set_name = get_tmp_ds_name(symbols=True).replace("-", "") ds_name = get_tmp_ds_name(symbols=True).replace("-", "") - results = hosts.all.zos_data_set(name=data_set_name, state="present", type="gdg", limit=3) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=f"{data_set_name}(+1)", state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None - results = hosts.all.zos_data_set(name=ds_name, state="present", type=dstype) + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{ds_name}'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -1245,10 +1305,115 @@ def managed_user_backup_of_data_set_tmphlq_restricted_user(ansible_zos_module): for result in results.contacted.values(): assert result.get("backup_name") == '', \ f"Backup name '{backup_name}' is there in output so tmphlq failed." 
- print(result) assert result.get("changed", False) is False - + finally: delete_data_set_or_file(hosts, data_set_name) delete_data_set_or_file(hosts, backup_name) - delete_remnants(hosts, hlqs) \ No newline at end of file + delete_remnants(hosts, hlqs) + + +def test_backup_of_vsam_index(ansible_zos_module, volumes_with_vvds): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + alternate_index = get_tmp_ds_name() + backup_name = get_tmp_ds_name() + + try: + volume_handler = Volume_Handler(volumes_with_vvds) + volume = volume_handler.get_available_vol() + # Create VSAM KSDS + create_vsam( + hosts, data_set_name + ) + # Create alternate indexes + aix_cmd = f""" +echo ' DEFINE ALTERNATEINDEX (NAME({alternate_index}) - + RELATE({data_set_name}) - + KEYS(4 0) - + VOLUMES({volume}) - + CYLINDERS(10 1) - + FREESPACE(10 10) - + NONUNIQUEKEY) - + DATA (NAME({alternate_index}.DATA)) - + INDEX (NAME({alternate_index}.INDEX)) ' | mvscmdauth --pgm=IDCAMS --sysprint=* --sysin=stdin + + """ + results = hosts.all.shell(cmd=f"{aix_cmd}") + assert_module_did_not_fail(results) + + + results = hosts.all.zos_backup_restore( + operation="backup", + data_sets=dict(include=data_set_name), + backup_name=backup_name, + index=True, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, backup_name) + + # Delete the vsam data set and alternate index + delete_data_set(hosts, data_set_name) + delete_data_set(hosts, alternate_index) + + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=backup_name, + index=True, + ) + + # Validate that both original vsam and alternate index exist + vls_result = hosts.all.shell(f"vls {alternate_index}") + assert_module_did_not_fail(vls_result) + for result in vls_result.contacted.values(): + assert alternate_index in result.get("stdout") + assert f"{alternate_index}.DATA" in result.get("stdout") + assert f"{alternate_index}.INDEX" in result.get("stdout") + vls_result = hosts.all.shell(f"vls 
{data_set_name}") + assert_module_did_not_fail(vls_result) + for result in vls_result.contacted.values(): + assert data_set_name in result.get("stdout") + assert f"{data_set_name}.DATA" in result.get("stdout") + assert f"{data_set_name}.INDEX" in result.get("stdout") + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, alternate_index) + delete_data_set_or_file(hosts, backup_name) + + +def test_backup_and_restore_of_auth_shr_group(ansible_zos_module, volumes_sms_systems): + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + data_set_backup_location = get_tmp_ds_name() + try: + volumes = Volume_Handler(volumes_sms_systems) + volume, smsgrp = volumes.get_available_vol_with_sms() + delete_data_set_or_file(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + create_sequential_data_set_with_contents( + hosts, data_set_name, DATA_SET_CONTENTS, volume + ) + results = hosts.all.zos_backup_restore( + data_sets=dict(include=data_set_name), + operation="backup", + backup_name=data_set_backup_location, + overwrite=True, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_backup_location) + delete_data_set_or_file(hosts, data_set_name) + access = { + "share":True, + "auth":True + } + results = hosts.all.zos_backup_restore( + operation="restore", + backup_name=data_set_backup_location, + overwrite=True, + access=access, + ) + assert_module_did_not_fail(results) + assert_data_set_or_file_exists(hosts, data_set_name) + finally: + delete_data_set_or_file(hosts, data_set_name) + delete_data_set_or_file(hosts, data_set_backup_location) \ No newline at end of file diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index ec21fdf331..146af86b95 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -440,6 +440,18 @@ 
BACKUP_OPTIONS = [None, "SEQ", "MEM"] +expected_keys = [ + 'changed', + 'cmd', + 'found', + 'stdout', + 'stdout_lines', + 'stderr', + 'stderr_lines', + 'rc' +] + + def set_uss_environment(ansible_zos_module, content, file): hosts = ansible_zos_module hosts.all.file(path=file, state="touch") @@ -452,10 +464,12 @@ def remove_uss_environment(ansible_zos_module, file): def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set(name=ds_name, type=ds_type) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") + # hosts.all.zos_data_set(name=ds_name, type=ds_type) if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + hosts.all.shell(cmd=f"decho '' '{ds_full_name}'") + # hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name @@ -466,7 +480,8 @@ def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) def remove_ds_environment(ansible_zos_module, ds_name): hosts = ansible_zos_module - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") + # hosts.all.zos_data_set(name=ds_name, state="absent") ######################### # USS test cases @@ -489,6 +504,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -512,6 +528,7 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): results = 
hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -535,6 +552,7 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -558,6 +576,7 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -585,6 +604,7 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX_CUSTOM @@ -613,6 +633,7 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): 
assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX_CUSTOM @@ -640,6 +661,7 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM @@ -667,6 +689,7 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM @@ -689,6 +712,7 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -714,6 +738,7 @@ def test_uss_block_absent_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -737,6 +762,7 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result 
for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER @@ -760,6 +786,7 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE @@ -783,6 +810,7 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_CUSTOM @@ -806,6 +834,7 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_CUSTOM @@ -832,6 +861,7 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_REGEX_CUSTOM @@ -858,6 +888,7 @@ def 
test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_REGEX_CUSTOM @@ -884,6 +915,7 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM @@ -910,6 +942,7 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM @@ -934,6 +967,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION @@ -955,6 +989,7 @@ def test_uss_block_insert_with_doublequotes(ansible_zos_module): for result in results.contacted.values(): print(result) assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = 
hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES @@ -980,6 +1015,7 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): for result in results.contacted.values(): backup_name = result.get("backup_name") assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert backup_name is not None results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): @@ -1008,6 +1044,7 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("backup_name") == uss_backup_file cmd_str = f"cat {uss_backup_file}" results = ansible_zos_module.all.shell(cmd=cmd_str) @@ -1045,6 +1082,7 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -1071,6 +1109,7 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -1097,6 +1136,7 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): 
assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -1123,6 +1163,7 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -1149,6 +1190,7 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER @@ -1175,6 +1217,7 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE @@ -1201,6 +1244,7 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == 
EXPECTED_INSERTAFTER_EOF @@ -1227,6 +1271,7 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -1252,6 +1297,7 @@ def test_ds_block_absent(ansible_zos_module, dstype): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -1279,7 +1325,8 @@ def test_ds_tmp_hlq_option(ansible_zos_module): try: ds_full_name = get_tmp_ds_name() temp_file = get_random_file_name(dir=TMP_DIRECTORY) - hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_full_name}'") + # hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " hosts.all.shell(cmd=cmd_str) @@ -1293,7 +1340,8 @@ def test_ds_tmp_hlq_option(ansible_zos_module): for key in kwargs: assert kwargs.get(key) in result.get(key) finally: - hosts.all.zos_data_set(name=ds_full_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_full_name}'") + # hosts.all.zos_data_set(name=ds_full_name, state="absent") hosts.all.file(name=temp_file, state="absent") @@ -1317,6 +1365,7 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result 
for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION @@ -1349,6 +1398,7 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) backup_ds_name = result.get("backup_name") assert backup_ds_name is not None results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) @@ -1365,7 +1415,8 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup finally: remove_ds_environment(ansible_zos_module, ds_name) if backup_ds_name != "": - ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{backup_ds_name}'") + # ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") @@ -1390,24 +1441,28 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): else: params["path"] = f"{default_data_set_name}({member_2})" try: + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + # hosts.all.zos_data_set( + # name=default_data_set_name, + # state="present", + # type=ds_type, + # replace=True + # ) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { - "name": f"{default_data_set_name}({member_1})", - "type": "member", - "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, - ] - ) + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") + # hosts.all.zos_data_set( + # batch=[ + 
# { + # "name": f"{default_data_set_name}({member_1})", + # "type": "member", + # "state": "present", "replace": True, }, + # { "name": params["path"], "type": "member", + # "state": "present", "replace": True, }, + # ] + # ) # write memeber to verify cases if ds_type in ["pds", "pdse"]: cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) @@ -1441,7 +1496,8 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}'") + # hosts.all.zos_data_set(name=default_data_set_name, state="absent") @pytest.mark.ds @@ -1459,6 +1515,7 @@ def test_gdd_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) for result in results.contacted.values(): assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" @@ -1467,6 +1524,7 @@ def test_gdd_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) for result in results.contacted.values(): assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" @@ -1476,6 +1534,7 @@ def test_gdd_ds_insert_block(ansible_zos_module): results = 
hosts.all.zos_blockinfile(**params_w_bck) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("rc") == 0 backup = ds_name + "(0)" results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) @@ -1497,12 +1556,14 @@ def test_special_characters_ds_insert_block(ansible_zos_module): ds_name = get_tmp_ds_name(5, 5, symbols=True) backup = get_tmp_ds_name(6, 6, symbols=True) try: - result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + hosts.all.shell(cmd=f"dtouch -tseq '{ds_name}'") + # result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") params["src"] = ds_name results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) src = ds_name.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): @@ -1513,6 +1574,7 @@ def test_special_characters_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params_w_bck) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("rc") == 0 backup = backup.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) @@ -1547,6 +1609,7 @@ def test_uss_encoding(ansible_zos_module, encoding): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING @@ -1562,12 +1625,14 @@ def test_special_characters_ds_insert_block(ansible_zos_module): ds_name = get_tmp_ds_name(5, 5, symbols=True) backup = get_tmp_ds_name(6, 6, symbols=True) 
try: - result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + hosts.all.shell(cmd=f"dtouch -tseq '{ds_name}'") + # result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") params["src"] = ds_name results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) src = ds_name.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): @@ -1578,6 +1643,7 @@ def test_special_characters_ds_insert_block(ansible_zos_module): results = hosts.all.zos_blockinfile(**params_w_bck) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) assert result.get("rc") == 0 backup = backup.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) @@ -1626,6 +1692,7 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -1661,7 +1728,8 @@ def test_ds_not_supported(ansible_zos_module, dstype): ds_name = get_tmp_ds_name() try: ds_name = ds_name.upper() + "." 
+ ds_type - results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + results = hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") + # results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') for result in results.contacted.values(): assert result.get("changed") is True params["path"] = ds_name @@ -1670,7 +1738,8 @@ def test_ds_not_supported(ansible_zos_module, dstype): assert result.get("changed") is False assert result.get("msg") == "VSAM data set type is NOT supported" finally: - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") + # hosts.all.zos_data_set(name=ds_name, state="absent") # Enhancemed #1339 @@ -1692,30 +1761,34 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + # hosts.all.zos_data_set( + # name=default_data_set_name, + # state="present", + # type=ds_type, + # replace=True + # ) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { - "name": f"{default_data_set_name}({member_1})", - "type": "member", - "state": "present", - "replace": True, - }, - { - "name": params["path"], - "type": "member", - "state": "present", - "replace": True, - }, - ] - ) + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") + # hosts.all.zos_data_set( + # batch=[ + # { + # "name": f"{default_data_set_name}({member_1})", + # "type": "member", + # "state": "present", + # "replace": True, + # }, + # { + # "name": params["path"], + # "type": "member", + # "state": "present", + # "replace": True, + # }, + # ] + # ) cmd_str = "cp -CM {0} 
\"//'{1}'\"".format(quote(temp_file) ,params["path"]) hosts.all.shell(cmd=cmd_str) results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) @@ -1742,4 +1815,5 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}'") + # hosts.all.zos_data_set(name=default_data_set_name, state="absent") diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index 9cfb15bf94..83338c22b5 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -247,7 +247,7 @@ zos_copy: src: /etc/profile remote_src: True - force: True + replace: True dest: {3} async: 50 poll: 0 @@ -408,13 +408,13 @@ def link_loadlib_from_cobol(hosts, cobol_src_pds, cobol_src_mem, loadlib_pds, lo cp_res = hosts.all.zos_copy( content=LINK_JCL.format(cobol_src_pds, cobol_src_mem, loadlib_pds, loadlib_mem, loadlib_alias_mem), dest=temp_jcl_uss_path, - force=True, + replace=True, ) # Submit link JCL. 
job_result = hosts.all.zos_job_submit( src=temp_jcl_uss_path, - location="uss", - wait_time_s=60 + remote_src=True, + wait_time=60 ) for result in job_result.contacted.values(): print(result) @@ -462,8 +462,8 @@ def generate_loadlib(hosts, cobol_src_pds, cobol_src_mems, loadlib_pds, loadlib_ def generate_executable_uss(hosts, dir, src, src_jcl_call): - hosts.all.zos_copy(content=hello_world, dest=src, force=True) - hosts.all.zos_copy(content=call_c_hello_jcl.format(dir), dest=src_jcl_call, force=True) + hosts.all.zos_copy(content=hello_world, dest=src, replace=True) + hosts.all.zos_copy(content=call_c_hello_jcl.format(dir), dest=src_jcl_call, replace=True) hosts.all.shell(cmd="xlc -o hello_world hello_world.c", chdir=dir) hosts.all.shell(cmd="submit {0}".format(src_jcl_call)) verify_exe_src = hosts.all.shell(cmd="{0}/hello_world".format(dir)) @@ -475,12 +475,12 @@ def generate_executable_uss(hosts, dir, src, src_jcl_call): @pytest.mark.uss @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=False, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=True, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=True), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=False), + dict(src="Example inline content", is_file=False, binary=False, is_remote=False), + dict(src="Example inline content", is_file=False, binary=True, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=True), ]) def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): hosts = 
ansible_zos_module @@ -490,16 +490,19 @@ def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): hosts.all.file(path=dest_path, state="absent") if src["is_file"]: - copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, is_binary=src["is_binary"], remote_src=src["is_remote"]) + copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, binary=src["binary"], remote_src=src["is_remote"]) else: - copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, is_binary=src["is_binary"]) + copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, binary=src["binary"]) stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): + print(result) assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: @@ -508,12 +511,12 @@ def test_copy_file_to_non_existing_uss_file(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, force=False, is_remote=False), - dict(src="/etc/profile", is_file=True, force=True, is_remote=False), - dict(src="Example inline content", is_file=False, force=False, is_remote=False), - dict(src="Example inline content", is_file=False, force=True, is_remote=False), - dict(src="/etc/profile", is_file=True, force=False, is_remote=True), - dict(src="/etc/profile", is_file=True, force=True, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=False), + dict(src="Example inline content", is_file=False, replace=False, is_remote=False), + dict(src="Example inline content", is_file=False, replace=True, is_remote=False), + dict(src="/etc/profile", is_file=True, 
replace=False, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=True), ]) def test_copy_file_to_existing_uss_file(ansible_zos_module, src): hosts = ansible_zos_module @@ -526,18 +529,20 @@ def test_copy_file_to_existing_uss_file(ansible_zos_module, src): assert timestamp is not None if src["is_file"]: - copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, force=src["force"], remote_src=src["is_remote"]) + copy_res = hosts.all.zos_copy(src=src["src"], dest=dest_path, replace=src["replace"], remote_src=src["is_remote"]) else: - copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, force=src["force"]) + copy_res = hosts.all.zos_copy(content=src["src"], dest=dest_path, replace=src["replace"]) stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): - if src["force"]: + if src["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -549,10 +554,10 @@ def test_copy_file_to_existing_uss_file(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_binary=False, is_remote=False), - dict(src="/etc/profile", is_binary=True, is_remote=False), - dict(src="/etc/profile", is_binary=False, is_remote=True), - dict(src="/etc/profile", is_binary=True, is_remote=True), + dict(src="/etc/profile", binary=False, is_remote=False), + dict(src="/etc/profile", binary=True, is_remote=False), + dict(src="/etc/profile", binary=False, is_remote=True), + dict(src="/etc/profile", binary=True, is_remote=True), ]) def test_copy_file_to_uss_dir(ansible_zos_module, src): hosts = ansible_zos_module @@ -562,7 +567,7 @@ def test_copy_file_to_uss_dir(ansible_zos_module, src): try: - copy_res = 
hosts.all.zos_copy(src=src["src"], dest=dest, is_binary=src["is_binary"], remote_src=src["is_remote"]) + copy_res = hosts.all.zos_copy(src=src["src"], dest=dest, binary=src["binary"], remote_src=src["is_remote"]) stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): @@ -570,6 +575,8 @@ def test_copy_file_to_uss_dir(ansible_zos_module, src): assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for st in stat_res.contacted.values(): assert st.get("stat").get("exists") is True finally: @@ -592,7 +599,8 @@ def test_copy_file_to_uss_dir_missing_parents(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest - assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for st in stat_res.contacted.values(): assert st.get("stat").get("exists") is True finally: @@ -616,6 +624,11 @@ def test_copy_local_symlink_to_uss_file(ansible_zos_module): stat_res = hosts.all.stat(path=dest_path) for result in copy_res.contacted.values(): assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -642,6 +655,8 @@ def test_copy_local_file_to_uss_file_convert_encoding(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is 
True finally: @@ -665,6 +680,8 @@ def test_copy_local_file_to_uss_file_with_absent_remote_tmp_dir(ansible_zos_modu assert result.get("changed") is True assert result.get("dest") == dest_path assert result.get("state") == "file" + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: @@ -685,6 +702,8 @@ def test_copy_inline_content_to_uss_dir(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True finally: @@ -708,11 +727,10 @@ def test_copy_dir_to_existing_uss_dir_not_forced(ansible_zos_module): src=src_dir, dest=dest_dir, remote_src=True, - force=False + replace=False ) for result in copy_result.contacted.values(): - print(result) assert result.get("msg") is not None assert result.get("changed") is False assert "Error" in result.get("msg") @@ -799,6 +817,9 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding(ansible_zos_module for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None # File z/OS dest is now UTF-8, dump the hex value and compare it to an # expected big-endian version, can't run delegate_to local host so expected @@ -840,6 +861,9 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None full_outer_file= 
"{0}/{1}/file3".format(dest_path, level_1) full_iner_file= "{0}/{1}/{2}/file3".format(dest_path, level_1, level_2) @@ -847,10 +871,8 @@ def test_copy_subdirs_folders_and_validate_recursive_encoding_local(ansible_zos_ verify_copy_2 = hosts.all.shell(cmd="cat {0}".format(full_iner_file)) for result in verify_copy_1.contacted.values(): - print(result) assert result.get("stdout") == DUMMY_DATA for result in verify_copy_2.contacted.values(): - print(result) assert result.get("stdout") == DUMMY_DATA finally: hosts.all.file(name=dest_path, state="absent") @@ -888,6 +910,9 @@ def test_copy_local_dir_to_non_existing_dir(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_path, src_basename) @@ -938,6 +963,9 @@ def test_copy_uss_dir_to_non_existing_dir(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_dir, src_basename) @@ -981,7 +1009,7 @@ def test_copy_local_dir_to_existing_dir_forced(ansible_zos_module, copy_director copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=True + replace=True ) stat_source_res = hosts.all.stat(path="{0}/{1}".format(dest_path, source_basename)) @@ -994,6 +1022,9 @@ def test_copy_local_dir_to_existing_dir_forced(ansible_zos_module, copy_director for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None 
+ assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_path, source_basename) @@ -1042,7 +1073,7 @@ def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory) src=src_dir, dest=dest_dir, remote_src=True, - force=True + replace=True ) stat_dir_res = hosts.all.stat(path="{0}/{1}".format(dest_dir, src_basename)) @@ -1055,6 +1086,9 @@ def test_copy_uss_dir_to_existing_dir_forced(ansible_zos_module, copy_directory) for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == "{0}/{1}".format(dest_dir, src_basename) @@ -1105,7 +1139,7 @@ def test_copy_local_nested_dir_to_uss(ansible_zos_module, create_dest): copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=create_dest + replace=create_dest ) stat_subdir_a_res = hosts.all.stat(path="{0}/subdir_a".format(dest_path)) @@ -1115,6 +1149,8 @@ def test_copy_local_nested_dir_to_uss(ansible_zos_module, create_dest): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_path + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_subdir_a_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is True @@ -1147,7 +1183,7 @@ def test_copy_uss_nested_dir_to_uss(ansible_zos_module, create_dest): src=source_path, dest=dest_path, remote_src=True, - force=create_dest + replace=create_dest ) stat_subdir_a_res = hosts.all.stat(path="{0}/subdir_a".format(dest_path)) @@ -1157,6 +1193,8 @@ def test_copy_uss_nested_dir_to_uss(ansible_zos_module, create_dest): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == 
dest_path + assert result.get("src") is not None + assert result.get("dest_created") is not None for result in stat_subdir_a_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is True @@ -1202,7 +1240,7 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=True, + replace=True, mode=mode ) @@ -1218,6 +1256,9 @@ def test_copy_local_dir_and_change_mode(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == dest_subdir @@ -1295,7 +1336,7 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): copy_result = hosts.all.zos_copy( src=source_path, dest=dest_path, - force=True, + replace=True, remote_src=True, mode=mode ) @@ -1312,6 +1353,9 @@ def test_copy_uss_dir_and_change_mode(ansible_zos_module, copy_directory): for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if copy_directory: assert result.get("dest") == dest_subdir @@ -1370,13 +1414,17 @@ def test_backup_uss_file(ansible_zos_module, backup): if backup: backup_name = get_random_file_name(dir=TMP_DIRECTORY) - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True, backup_name=backup_name) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True) for result 
in copy_res.contacted.values(): assert result.get("msg") is None backup_name_result = result.get("backup_name") + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("src") is not None + assert result.get("dest_created") is not None if backup: assert backup_name_result == backup_name @@ -1474,6 +1522,8 @@ def test_copy_template_file(ansible_zos_module, encoding): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_template + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. @@ -1534,7 +1584,7 @@ def test_copy_template_dir(ansible_zos_module): src=temp_dir, dest=dest_path, use_template=True, - force=True + replace=True ) verify_copy_a = hosts.all.shell( @@ -1550,6 +1600,8 @@ def test_copy_template_dir(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_path + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy_a.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. @@ -1620,6 +1672,8 @@ def test_copy_template_file_with_non_default_markers(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_template + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. 
@@ -1671,6 +1725,8 @@ def test_copy_template_file_to_dataset(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_dataset + assert cp_res.get("src") is not None + assert cp_res.get("dest_created") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Checking that all markers got replaced. @@ -1719,6 +1775,7 @@ def test_copy_asa_file_to_asa_sequential(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1754,11 +1811,11 @@ def test_copy_asa_file_to_asa_partitioned(ansible_zos_module): ) for cp_res in copy_result.contacted.values(): - print(cp_res) assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == full_dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): print(v_cp) assert v_cp.get("rc") == 0 @@ -1813,6 +1870,7 @@ def test_copy_seq_data_set_to_seq_asa(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1869,6 +1927,7 @@ def test_copy_seq_data_set_to_partitioned_asa(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == full_dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1925,6 +1984,7 @@ def test_copy_partitioned_data_set_to_seq_asa(ansible_zos_module): assert 
cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -1982,6 +2042,7 @@ def test_copy_partitioned_data_set_to_partitioned_asa(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == full_dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -2033,6 +2094,8 @@ def test_copy_asa_data_set_to_text_file(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 # Since OPUT preserves all blank spaces associated @@ -2058,9 +2121,16 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, try: hosts.all.file(path=dest_path, state="directory", mode=mode) permissions_before = hosts.all.stat(path=dest_path) - hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=other_mode) + cp_bef_result = hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=other_mode) permissions = hosts.all.stat(path=dest_path) + for cp_res in cp_bef_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None + for before in permissions_before.contacted.values(): permissions_be_copy = before.get("stat").get("mode") @@ -2070,7 +2140,15 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, assert permissions_be_copy == permissions_af_copy # Extra asserts to ensure change 
mode rewrite a copy - hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=mode_overwrite) + af_bef_result = hosts.all.zos_copy(src=src["src"], dest=dest_path, mode=mode_overwrite) + + for cp_res in af_bef_result.contacted.values(): + assert cp_res.get("msg") is None + assert cp_res.get("changed") is False + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None + permissions_overwriten = hosts.all.stat(path = full_path) for over in permissions_overwriten.contacted.values(): assert over.get("stat").get("mode") == mode_overwrite @@ -2079,7 +2157,7 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, @pytest.mark.seq -@pytest.mark.parametrize("ds_type, f_lock",[ +@pytest.mark.parametrize("ds_type, force",[ ("pds", True), # Success path, pds locked, force_lock enabled and user authorized ("pdse", True), # Success path, pdse locked, force_lock enabled and user authorized ("seq", True), # Success path, seq locked, force_lock enabled and user authorized @@ -2087,7 +2165,7 @@ def test_ensure_copy_file_does_not_change_permission_on_dest(ansible_zos_module, ("pdse", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ("seq", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ]) -def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, f_lock): +def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, force): retries = 0 max_retries = 5 success = False @@ -2095,8 +2173,8 @@ def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, f_lock): # Not adding a try/except block here so a real exception can bubble up # and stop pytest immediately (if using -x or --stop). while retries < max_retries: - print(f'Trying dest lock for {ds_type}. Expecting success? {f_lock}. 
Retry: {retries}.') - result = copy_dest_lock(ansible_zos_module, ds_type, f_lock) + print(f'Trying dest lock for {ds_type}. Expecting success? {force}. Retry: {retries}.') + result = copy_dest_lock(ansible_zos_module, ds_type, force) if result: success = True @@ -2107,7 +2185,7 @@ def test_copy_dest_lock_wrapper(ansible_zos_module, ds_type, f_lock): assert success is True -def copy_dest_lock(ansible_zos_module, ds_type, f_lock): +def copy_dest_lock(ansible_zos_module, ds_type, force): hosts = ansible_zos_module assert_msg = "" @@ -2177,34 +2255,38 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): decho_result = hosts.all.shell(cmd="decho \"{0}\" \"{1}\"".format(DUMMY_DATA, src_data_set)) for result in decho_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) temp_dir = get_random_file_name(dir=TMP_DIRECTORY) - c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', force=True) + c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', replace=True) for result in c_src_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None jcl_result = hosts.all.zos_copy( content=call_c_jcl.format(temp_dir, dest_data_set), dest=f'{temp_dir}/call_c_pgm.jcl', - force=True + replace=True ) for result in jcl_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) - assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("changed") is True + assert 
result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None subproc_result = hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=f"{temp_dir}/") for result in subproc_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False @@ -2212,7 +2294,6 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): job_result = hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=f"{temp_dir}/") for result in job_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False @@ -2222,23 +2303,25 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): src = src_data_set, dest = dest_data_set, remote_src = True, - force=True, - force_lock=f_lock, + replace=True, + force=force, ) for result in results.contacted.values(): assert_msg = result.get("stdout", "") - print(result) - if f_lock: #and apf_auth_user: + if force: #and apf_auth_user: + print(result) assert result.get("changed") == True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None # verify that the content is the same verify_copy = hosts.all.shell( cmd="dcat \'{0}\'".format(dest_data_set), executable=SHELL_EXECUTABLE, ) for vp_result in verify_copy.contacted.values(): - print(vp_result) verify_copy_2 = hosts.all.shell( cmd="dcat \'{0}\'".format(src_data_set), executable=SHELL_EXECUTABLE, @@ -2246,7 +2329,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): for vp_result_2 in verify_copy_2.contacted.values(): print(vp_result_2) assert vp_result_2.get("stdout") == vp_result.get("stdout") - elif not f_lock: + elif not force: assert result.get("failed") is True assert result.get("changed") == False assert "because a task is accessing the data 
set" in result.get("msg") @@ -2256,7 +2339,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): except AssertionError: # Checking for the error code from when the system thinks both data sets # are identical. - if "FSUM8977" in assert_msg: + if "FSUM8977" in str(assert_msg): return False else: raise @@ -2277,7 +2360,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.asa -@pytest.mark.parametrize("ds_type, f_lock",[ +@pytest.mark.parametrize("ds_type, force",[ ("pds", True), # Success path, pds locked, force_lock enabled and user authorized ("pdse", True), # Success path, pdse locked, force_lock enabled and user authorized ("seq", True), # Success path, seq locked, force_lock enabled and user authorized @@ -2285,7 +2368,7 @@ def copy_dest_lock(ansible_zos_module, ds_type, f_lock): ("pdse", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ("seq", False), # Module exits with: Unable to write to dest '{0}' because a task is accessing the data set." ]) -def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, f_lock): +def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, force): retries = 0 max_retries = 5 success = False @@ -2293,8 +2376,8 @@ def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, f_lock): # Not adding a try/except block here so a real exception can bubble up # and stop pytest immediately (if using -x or --stop). while retries < max_retries: - print(f'Trying dest lock for {ds_type}. Expecting success? {f_lock}. Retry: {retries}.') - result = copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock) + print(f'Trying dest lock for {ds_type}. Expecting success? {force}. 
Retry: {retries}.') + result = copy_asa_dest_lock(ansible_zos_module, ds_type, force) if result: success = True @@ -2305,7 +2388,7 @@ def test_copy_dest_lock_wrapper_asa(ansible_zos_module, ds_type, f_lock): assert success is True -def copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock): +def copy_asa_dest_lock(ansible_zos_module, ds_type, force): hosts = ansible_zos_module assert_msg = "" @@ -2348,23 +2431,30 @@ def copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock): # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) temp_dir = get_random_file_name(dir=TMP_DIRECTORY) - c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', force=True) + c_src_result = hosts.all.zos_copy(content=c_pgm, dest=f'{temp_dir}/pdse-lock.c', replace=True) for result in c_src_result.contacted.values(): assert_msg = result.get("stdout", "") - print(result) assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None jcl_result = hosts.all.zos_copy( content=call_c_jcl.format(temp_dir, dest_data_set), dest=f'{temp_dir}/call_c_pgm.jcl', - force=True + replace=True ) for result in jcl_result.contacted.values(): assert_msg = result.get("stdout", "") print(result) assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None subproc_result = hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=f"{temp_dir}/") for result in subproc_result.contacted.values(): @@ -2389,16 +2479,19 @@ def copy_asa_dest_lock(ansible_zos_module, ds_type, f_lock): dest=dest_data_set, remote_src=False, asa_text=True, - force=True, - force_lock=f_lock + replace=True, + force=force ) for 
result in results.contacted.values(): assert_msg = result.get("stdout", "") print(result) - if f_lock: #and apf_auth_user: + if force: #and apf_auth_user: assert result.get("changed") is True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None # We need to escape the data set name because we are using cat, using dcat will # bring the trailing empty spaces according to the data set record length. @@ -2457,7 +2550,7 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): src=src, dest=dest, remote_src=False, - is_binary=False + binary=False ) verify_copy = hosts.all.shell( @@ -2474,6 +2567,8 @@ def test_copy_file_record_length_to_sequential_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 for v_recl in verify_recl.contacted.values(): @@ -2510,7 +2605,7 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): src=src, dest=dest, remote_src=False, - is_binary=False + binary=False ) verify_copy = hosts.all.shell( @@ -2527,6 +2622,8 @@ def test_copy_file_crlf_endings_to_sequential_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert len(v_cp.get("stdout_lines")) == 2 @@ -2567,7 +2664,7 @@ def test_copy_file_crlf_endings_and_pound_to_seq_data_set(ansible_zos_module): "to": "IBM-285" }, remote_src=False, - is_binary=False + binary=False ) verify_copy = hosts.all.shell( @@ -2585,6 +2682,8 @@ def 
test_copy_file_crlf_endings_and_pound_to_seq_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): print(v_cp) assert v_cp.get("rc") == 0 @@ -2623,13 +2722,15 @@ def test_copy_local_binary_file_without_encoding_conversion(ansible_zos_module): src=src, dest=dest, remote_src=False, - is_binary=True + binary=True ) for cp_res in copy_result.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.zos_data_set(name=dest, state="absent") os.remove(src) @@ -2665,13 +2766,15 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) src=src, dest=dest, remote_src=True, - is_binary=True + binary=True ) for cp_res in copy_result.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.zos_data_set(name=dest, state="absent") hosts.all.file(path=src, state="absent") @@ -2680,12 +2783,12 @@ def test_copy_remote_binary_file_without_encoding_conversion(ansible_zos_module) @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=False, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=True, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=True), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=True), + 
dict(src="/etc/profile", is_file=True, binary=False, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=False), + dict(src="Example inline content", is_file=False, binary=False, is_remote=False), + dict(src="Example inline content", is_file=False, binary=True, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=True), ]) def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module @@ -2695,9 +2798,9 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, state="absent") if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], is_binary=src["is_binary"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], binary=src["binary"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], is_binary=src["is_binary"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], binary=src["binary"]) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), @@ -2709,7 +2812,7 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True - assert cp_res.get("is_binary") == src["is_binary"] + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -2719,12 +2822,12 @@ def test_copy_file_to_non_existing_sequential_data_set(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, force=True, is_remote=False), - dict(src="Example inline content", is_file=False, 
force=True, is_remote=False), - dict(src="/etc/profile", is_file=True, force=True, is_remote=True), - dict(src="/etc/profile", is_file=True, force=False, is_remote=False), - dict(src="Example inline content", is_file=False, force=False, is_remote=False), - dict(src="/etc/profile", is_file=True, force=False, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=False), + dict(src="Example inline content", is_file=False, replace=True, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=False), + dict(src="Example inline content", is_file=False, replace=False, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=True), ]) def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module @@ -2734,14 +2837,16 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, type="seq", state="present") if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], replace=src["replace"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, remote_src=src["is_remote"], replace=src["replace"]) for result in copy_result.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=dest, state="absent") @@ -2749,10 +2854,10 @@ def test_copy_file_to_empty_sequential_data_set(ansible_zos_module, src): @pytest.mark.uss @pytest.mark.seq @pytest.mark.parametrize("src", [ 
- dict(src="/etc/profile", force=False, is_remote=False), - dict(src="/etc/profile", force=True, is_remote=False), - dict(src="/etc/profile", force=False, is_remote=True), - dict(src="/etc/profile", force=True, is_remote=True), + dict(src="/etc/profile", replace=False, is_remote=False), + dict(src="/etc/profile", replace=True, is_remote=False), + dict(src="/etc/profile", replace=False, is_remote=True), + dict(src="/etc/profile", replace=True, is_remote=True), ]) def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): hosts = ansible_zos_module @@ -2762,13 +2867,15 @@ def test_copy_file_to_non_empty_sequential_data_set(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, type="seq", state="absent") hosts.all.zos_copy(content="Inline content", dest=dest) - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], force=src["force"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, remote_src=src["is_remote"], replace=src["replace"]) for result in copy_result.contacted.values(): - if src["force"]: + if src["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2795,6 +2902,8 @@ def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -2807,8 +2916,8 @@ def test_copy_ps_to_non_existing_uss_file(ansible_zos_module): @pytest.mark.uss @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def 
test_copy_ps_to_existing_uss_file(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_existing_uss_file(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_random_file_name(dir=TMP_DIRECTORY) @@ -2818,17 +2927,19 @@ def test_copy_ps_to_existing_uss_file(ansible_zos_module, force): hosts.all.file(path=dest, state="touch") hosts.all.shell(cmd=f"decho 'test line' '{src_ds}' ") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) stat_res = hosts.all.stat(path=dest) verify_copy = hosts.all.shell( cmd="cat {0}".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2861,6 +2972,9 @@ def test_copy_ps_to_existing_uss_dir(ansible_zos_module): for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -2888,6 +3002,7 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -2896,8 +3011,8 @@ def test_copy_ps_to_non_existing_ps(ansible_zos_module): 
@pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_empty_ps(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_empty_ps(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() @@ -2906,7 +3021,7 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): hosts.all.shell(cmd=f"decho 'test line ' '{src_ds}'") hosts.all.shell(cmd=f"dtouch -tseq '{src_ds}'") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) @@ -2915,6 +3030,8 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -2923,8 +3040,8 @@ def test_copy_ps_to_empty_ps(ansible_zos_module, force): @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_non_empty_ps(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() @@ -2933,16 +3050,18 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): hosts.all.shell(cmd=f"decho 'This is a test ' '{src_ds}' ") hosts.all.shell(cmd=f"decho 'This is a test ' '{dest}' ") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), 
executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2955,8 +3074,8 @@ def test_copy_ps_to_non_empty_ps(ansible_zos_module, force): @pytest.mark.seq -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest = get_tmp_ds_name() @@ -2965,16 +3084,18 @@ def test_copy_ps_to_non_empty_ps_with_special_chars(ansible_zos_module, force): hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{src_ds}' ") hosts.all.shell(cmd=f"decho '{DUMMY_DATA_SPECIAL_CHARS}' '{dest}' ") - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest, remote_src=True, replace=replace) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -2997,13 +3118,17 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): if backup: backup_name = get_tmp_ds_name() - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True, 
backup_name=backup_name) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("backup_name") is not None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None result_backup_name = result.get("backup_name") if backup: assert backup_name == result.get("backup_name") @@ -3026,12 +3151,12 @@ def test_backup_sequential_data_set(ansible_zos_module, backup): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=False, is_remote=False), - dict(src="Example inline content", is_file=False, is_binary=True, is_remote=False), - dict(src="/etc/profile", is_file=True, is_binary=False, is_remote=True), - dict(src="/etc/profile", is_file=True, is_binary=True, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=False), + dict(src="Example inline content", is_file=False, binary=False, is_remote=False), + dict(src="Example inline content", is_file=False, binary=True, is_remote=False), + dict(src="/etc/profile", is_file=True, binary=False, is_remote=True), + dict(src="/etc/profile", is_file=True, binary=True, is_remote=True), ]) def test_copy_file_to_non_existing_member(ansible_zos_module, src): hosts = ansible_zos_module @@ -3050,9 +3175,9 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): ) if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, is_binary=src["is_binary"], remote_src=src["is_remote"]) + 
copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, binary=src["binary"], remote_src=src["is_remote"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, is_binary=src["is_binary"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, binary=src["binary"]) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), @@ -3063,6 +3188,8 @@ def test_copy_file_to_non_existing_member(ansible_zos_module, src): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3103,6 +3230,8 @@ def test_copy_file_to_non_existing_member_implicit(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_member + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3112,12 +3241,12 @@ def test_copy_file_to_non_existing_member_implicit(ansible_zos_module): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("src", [ - dict(src="/etc/profile", is_file=True, force=False, is_remote=False), - dict(src="/etc/profile", is_file=True, force=True, is_remote=False), - dict(src="Example inline content", is_file=False, force=False, is_remote=False), - dict(src="Example inline content", is_file=False, force=True, is_remote=False), - dict(src="/etc/profile", is_file=True, force=False, is_remote=True), - dict(src="/etc/profile", is_file=True, force=True, is_remote=True) + dict(src="/etc/profile", is_file=True, replace=False, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=False), + dict(src="Example inline content", is_file=False, replace=False, is_remote=False), + dict(src="Example inline 
content", is_file=False, replace=True, is_remote=False), + dict(src="/etc/profile", is_file=True, replace=False, is_remote=True), + dict(src="/etc/profile", is_file=True, replace=True, is_remote=True) ]) def test_copy_file_to_existing_member(ansible_zos_module, src): hosts = ansible_zos_module @@ -3137,9 +3266,9 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): hosts.all.zos_data_set(name=dest, type="member", state="present") if src["is_file"]: - copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, force=src["force"], remote_src=src["is_remote"]) + copy_result = hosts.all.zos_copy(src=src["src"], dest=dest, replace=src["replace"], remote_src=src["is_remote"]) else: - copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, force=src["force"]) + copy_result = hosts.all.zos_copy(content=src["src"], dest=dest, replace=src["replace"]) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), @@ -3147,10 +3276,12 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): ) for cp_res in copy_result.contacted.values(): - if src["force"]: + if src["replace"]: assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None else: assert cp_res.get("msg") is not None assert cp_res.get("changed") is False @@ -3163,12 +3294,12 @@ def test_copy_file_to_existing_member(ansible_zos_module, src): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", is_binary=False), - dict(type="seq", is_binary=True), - dict(type="pds", is_binary=False), - dict(type="pds", is_binary=True), - dict(type="pdse", is_binary=False), - dict(type="pdse", is_binary=True) + dict(type="seq", binary=False), + dict(type="seq", binary=True), + dict(type="pds", binary=False), + dict(type="pds", binary=True), + dict(type="pdse", binary=False), + dict(type="pdse", binary=True) ]) def 
test_copy_data_set_to_non_existing_member(ansible_zos_module, args): hosts = ansible_zos_module @@ -3188,7 +3319,7 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): ) hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) - copy_result = hosts.all.zos_copy(src=src, dest=dest, is_binary=args["is_binary"], remote_src=True) + copy_result = hosts.all.zos_copy(src=src, dest=dest, binary=args["binary"], remote_src=True) verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), @@ -3199,6 +3330,8 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -3210,12 +3343,12 @@ def test_copy_data_set_to_non_existing_member(ansible_zos_module, args): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="seq", force=False), - dict(type="seq", force=True), - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True) + dict(type="seq", replace=False), + dict(type="seq", replace=True), + dict(type="pds", replace=False), + dict(type="pds", replace=True), + dict(type="pdse", replace=False), + dict(type="pdse", replace=True) ]) def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts = ansible_zos_module @@ -3236,7 +3369,7 @@ def test_copy_data_set_to_existing_member(ansible_zos_module, args): hosts.all.zos_data_set(name=dest_data_set, type="pdse", replace=True) hosts.all.zos_data_set(name=dest, type="member") - copy_result = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) + copy_result = hosts.all.zos_copy(src=src, dest=dest, replace=args["replace"], remote_src=True) verify_copy = 
hosts.all.shell( cmd="cat \"//'{0}'\"".format(dest), @@ -3244,16 +3377,18 @@ def test_copy_data_set_to_existing_member(ansible_zos_module, args): ) for cp_res in copy_result.contacted.values(): - if args["force"]: + if args["replace"]: assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None else: assert cp_res.get("msg") is not None assert cp_res.get("changed") is False for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 - if args["force"]: + if args["replace"]: assert v_cp.get("stdout") != "" finally: hosts.all.zos_data_set(name=src_data_set, state="absent") @@ -3283,6 +3418,7 @@ def test_copy_file_to_non_existing_pdse(ansible_zos_module, is_remote): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_path assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3312,6 +3448,7 @@ def test_copy_dir_to_non_existing_pdse(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 finally: @@ -3344,6 +3481,7 @@ def test_copy_dir_crlf_endings_to_non_existing_pdse(ansible_zos_module): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert len(result.get("stdout_lines")) == 2 @@ -3384,6 +3522,8 @@ def test_copy_dir_to_existing_pdse(ansible_zos_module, src_type): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest + assert cp_res.get("dest_created") is not None + assert 
cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -3424,6 +3564,7 @@ def test_copy_data_set_to_non_existing_pdse(ansible_zos_module, src_type): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -3458,6 +3599,8 @@ def test_copy_pds_to_existing_pds(ansible_zos_module, args): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 @@ -3564,11 +3707,15 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member(ansible_zos_module, is_cr assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -3677,10 +3824,13 @@ def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): dest=uss_dest, remote_src=True, executable=True, - force=True) + replace=True) for result in copy_uss_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None # run executable on USS verify_exe_uss = hosts.all.shell( @@ -3712,10 +3862,14 @@ 
def test_copy_pds_loadlib_member_to_uss_to_loadlib(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -3894,11 +4048,15 @@ def test_copy_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib_aliases) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -4079,6 +4237,8 @@ def test_copy_local_pds_loadlib_to_pds_loadlib(ansible_zos_module, is_created): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -4199,6 +4359,8 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(uss_dir_path) + assert result.get("dest_created") is not None + assert result.get("src") is 
not None # inspect USS dir contents verify_exe_uss_ls = hosts.all.shell( @@ -4245,11 +4407,16 @@ def test_copy_pds_loadlib_to_uss_to_pds_loadlib(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): + print(result) assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib_aliases) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -4314,12 +4481,15 @@ def test_copy_executables_uss_to_uss(ansible_zos_module): dest=dest_uss, remote_src=True, executable=True, - force=True + replace=True ) verify_exe_dst = hosts.all.shell(cmd=f"{c_dir}/hello_world_2") for result in copy_uss_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for res in verify_exe_dst.contacted.values(): assert res.get("rc") == 0 stdout = res.get("stdout") @@ -4358,7 +4528,7 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): dest="{0}({1})".format(dest, member), remote_src=True, executable=True, - force=True + replace=True ) cmd = "mvscmd --pgm={0} --steplib={1} --sysprint=* --stderr=* --stdout=*" exec_res = hosts.all.shell( @@ -4367,6 +4537,9 @@ def test_copy_executables_uss_to_member(ansible_zos_module, is_created): for result in copy_uss_to_mvs_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for res in exec_res.contacted.values(): assert 
res.get("rc") == 0 stdout = res.get("stdout") @@ -4410,6 +4583,8 @@ def test_copy_pds_member_with_system_symbol(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 @@ -4446,6 +4621,8 @@ def test_copy_multiple_data_set_members(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy = hosts.all.shell( cmd="mls {0}".format(dest), @@ -4493,6 +4670,8 @@ def test_copy_multiple_data_set_members_in_loop(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_member + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy = hosts.all.shell( cmd="mls {0}".format(dest), @@ -4537,6 +4716,8 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -4550,10 +4731,10 @@ def test_copy_member_to_non_existing_uss_file(ansible_zos_module, ds_type): @pytest.mark.uss @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(ds_type="pds", force=False), - dict(ds_type="pds", force=True), - dict(ds_type="pdse", force=False), - dict(ds_type="pdse", force=True) + dict(ds_type="pds", replace=False), + dict(ds_type="pds", replace=True), + dict(ds_type="pdse", replace=False), + dict(ds_type="pdse", replace=True) ]) def 
test_copy_member_to_existing_uss_file(ansible_zos_module, args): hosts = ansible_zos_module @@ -4569,17 +4750,19 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): executable=SHELL_EXECUTABLE ) - copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True, force=args["force"]) + copy_res = hosts.all.zos_copy(src=src, dest=dest, remote_src=True, replace=args["replace"]) stat_res = hosts.all.stat(path=dest) verify_copy = hosts.all.shell( cmd="head {0}".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if args["force"]: + if args["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -4587,7 +4770,7 @@ def test_copy_member_to_existing_uss_file(ansible_zos_module, args): assert result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): assert result.get("rc") == 0 - if args["force"]: + if args["replace"]: assert result.get("stdout") != "" finally: hosts.all.zos_data_set(name=data_set, state="absent") @@ -4631,6 +4814,8 @@ def test_copy_pdse_to_uss_dir(ansible_zos_module, src_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert result.get("stat").get("exists") is True assert result.get("stat").get("isdir") is True @@ -4677,6 +4862,8 @@ def test_copy_member_to_uss_dir(ansible_zos_module, src_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in stat_res.contacted.values(): assert 
result.get("stat").get("exists") is True for result in verify_copy.contacted.values(): @@ -4714,6 +4901,7 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): assert result.get("changed") is True assert result.get("dest") == dest assert result.get("dest_created") is True + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -4725,10 +4913,10 @@ def test_copy_member_to_non_existing_seq_data_set(ansible_zos_module, src_type): @pytest.mark.seq @pytest.mark.pdse @pytest.mark.parametrize("args", [ - dict(type="pds", force=False), - dict(type="pds", force=True), - dict(type="pdse", force=False), - dict(type="pdse", force=True), + dict(type="pds", replace=False), + dict(type="pds", replace=True), + dict(type="pdse", replace=False), + dict(type="pdse", replace=True), ]) def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): hosts = ansible_zos_module @@ -4746,22 +4934,24 @@ def test_copy_member_to_existing_seq_data_set(ansible_zos_module, args): executable=SHELL_EXECUTABLE ) - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=args["force"], remote_src=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=args["replace"], remote_src=True) verify_copy = hosts.all.shell( cmd="head \"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE ) for result in copy_res.contacted.values(): - if args["force"]: + if args["replace"]: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False for result in verify_copy.contacted.values(): assert result.get("rc") == 0 - if args["force"]: + if args["replace"]: assert result.get("stdout") != "" finally: hosts.all.zos_data_set(name=src_ds, state="absent") @@ -4804,6 +4994,8 
@@ def test_copy_file_to_member_convert_encoding(ansible_zos_module, dest_type): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" @@ -4831,14 +5023,16 @@ def test_backup_pds(ansible_zos_module, args): if args["backup"]: backup_name = get_tmp_ds_name() - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True, backup_name=backup_name) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True, backup_name=backup_name) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, force=True, backup=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, replace=True, backup=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None result_backup_name = result.get("backup_name") assert result_backup_name is not None @@ -4893,6 +5087,8 @@ def test_copy_data_set_to_volume(ansible_zos_module, volumes_on_systems, src_typ assert cp.get('msg') is None assert cp.get('changed') is True assert cp.get('dest') == dest + assert cp.get("dest_created") is not None + assert cp.get("src") is not None check_vol = hosts.all.shell( cmd="tsocmd \"LISTDS '{0}'\"".format(dest), @@ -4922,6 +5118,8 @@ def test_copy_ksds_to_non_existing_ksds(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_ds + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("dd_names") is not None dd_names = result.get("dd_names") @@ -4938,8 +5136,8 @@ def 
test_copy_ksds_to_non_existing_ksds(ansible_zos_module): ) @pytest.mark.vsam -@pytest.mark.parametrize("force", [False, True]) -def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_ksds_to_existing_ksds(ansible_zos_module, replace): hosts = ansible_zos_module src_ds = get_tmp_ds_name() dest_ds = get_tmp_ds_name() @@ -4948,14 +5146,16 @@ def test_copy_ksds_to_existing_ksds(ansible_zos_module, force): create_vsam_data_set(hosts, src_ds, "ksds", add_data=True, key_length=12, key_offset=0) create_vsam_data_set(hosts, dest_ds, "ksds", add_data=True, key_length=12, key_offset=0) - copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, force=force) + copy_res = hosts.all.zos_copy(src=src_ds, dest=dest_ds, remote_src=True, replace=replace) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") for result in copy_res.contacted.values(): - if force: + if replace: assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest_ds + assert result.get("dest_created") is not None + assert result.get("src") is not None else: assert result.get("msg") is not None assert result.get("changed") is False @@ -4986,13 +5186,16 @@ def test_backup_ksds(ansible_zos_module, backup): if backup: backup_name = get_tmp_ds_name() - copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup_name, remote_src=True, force=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, backup_name=backup_name, remote_src=True, replace=True) else: - copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, remote_src=True, force=True) + copy_res = hosts.all.zos_copy(src=src, dest=dest, backup=True, remote_src=True, replace=True) for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + 
assert result.get("src") is not None result_backup_name = result.get("backup_name") assert result_backup_name is not None @@ -5042,6 +5245,12 @@ def test_copy_ksds_to_volume(ansible_zos_module, volumes_on_systems): ) verify_copy = get_listcat_information(hosts, dest_ds, "ksds") + for result in copy_res.contacted.values(): + assert result.get("msg") is None + assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): assert result.get("dd_names") is not None dd_names = result.get("dd_names") @@ -5098,6 +5307,8 @@ def test_dest_data_set_parameters(ansible_zos_module, volumes_on_systems): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == dest + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in verify_copy.contacted.values(): # The tsocmd returns 5 lines like this: # USER.TEST.DEST @@ -5136,6 +5347,9 @@ def test_ensure_tmp_cleanup(ansible_zos_module): for result in copy_res.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None stat_dir = hosts.all.shell( cmd="ls", @@ -5153,32 +5367,36 @@ def test_ensure_tmp_cleanup(ansible_zos_module): @pytest.mark.vsam -@pytest.mark.parametrize("force", [False, True]) -def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option(ansible_zos_module, force): +@pytest.mark.parametrize("replace", [False, True]) +def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option(ansible_zos_module, replace): hosts = ansible_zos_module dest = get_tmp_ds_name() src_file = "/etc/profile" tmphlq = "TMPHLQ" try: hosts.all.zos_data_set(name=dest, type="seq", state="present") - copy_result = 
hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, force=force) - copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, force=force) + copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, replace=replace) + copy_result = hosts.all.zos_copy(src=src_file, dest=dest, remote_src=True, backup=True, tmp_hlq=tmphlq, replace=replace) verify_copy = None - if force: + if replace: verify_copy = hosts.all.shell( cmd="cat \"//'{0}'\" > /dev/null 2>/dev/null".format(dest), executable=SHELL_EXECUTABLE, ) for cp_res in copy_result.contacted.values(): - if force: + if replace: assert cp_res.get("msg") is None assert cp_res.get("backup_name")[:6] == tmphlq + assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None else: assert cp_res.get("msg") is not None assert cp_res.get("changed") is False - if force: + if replace: for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 finally: @@ -5188,11 +5406,11 @@ def test_copy_uss_file_to_existing_sequential_data_set_twice_with_tmphlq_option( @pytest.mark.parametrize("options", [ dict(src="/etc/profile", - force=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), - dict(src="/etc/profile", force=True, + replace=True, is_remote=False, verbosity="-vvvvv", verbosity_level=5), + dict(src="/etc/profile", replace=True, is_remote=False, verbosity="-vvvv", verbosity_level=4), dict(src="/etc/profile", - force=True, is_remote=False, verbosity="", verbosity_level=0), + replace=True, is_remote=False, verbosity="", verbosity_level=0), ]) def test_display_verbosity_in_zos_copy_plugin(ansible_zos_module, options): """Test the display verbosity, ensure it matches the verbosity_level. 
@@ -5284,6 +5502,8 @@ def test_copy_seq_gds_to_data_set(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert cp_res.get("dest") == dest_data_set + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5321,6 +5541,8 @@ def test_copy_data_set_to_new_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5356,6 +5578,8 @@ def test_copy_uss_file_to_new_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5394,6 +5618,8 @@ def test_copy_pds_to_new_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5421,7 +5647,7 @@ def test_copy_data_set_to_previous_gds(ansible_zos_module): src=src_data_set, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) @@ -5435,6 +5661,8 @@ def test_copy_data_set_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + 
assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5459,7 +5687,7 @@ def test_copy_uss_file_to_previous_gds(ansible_zos_module): src=src_file, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) @@ -5473,6 +5701,8 @@ def test_copy_uss_file_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5500,7 +5730,7 @@ def test_copy_pds_member_to_previous_gds(ansible_zos_module): src=member_src, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""dcat "{dest_data_set}(0)" """) @@ -5514,6 +5744,8 @@ def test_copy_pds_member_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5541,7 +5773,7 @@ def test_copy_pds_to_previous_gds(ansible_zos_module): src=src_data_set, dest=f"{dest_data_set}(0)", remote_src=True, - force=True + replace=True ) verify_copy = hosts.all.shell(cmd=f"""mls "{dest_data_set}(0)" """) @@ -5555,6 +5787,8 @@ def test_copy_pds_to_previous_gds(ansible_zos_module): assert cp_res.get("msg") is None assert cp_res.get("changed") is True assert re.fullmatch(gds_pattern, dest.split(".")[-1]) + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for 
v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") != "" @@ -5582,7 +5816,7 @@ def test_copy_data_set_to_previous_gds_no_force(ansible_zos_module): src=src_data_set, dest=f"{dest_data_set}(0)", remote_src=True, - force=False + replace=False ) for cp_res in copy_results.contacted.values(): @@ -5615,7 +5849,7 @@ def test_copy_data_set_to_previous_non_existent_gds(ansible_zos_module, generati # Copying to a previous generation that doesn't exist. dest=f"{dest_data_set}({generation})", remote_src=True, - force=True + replace=True ) for cp_res in copy_results.contacted.values(): @@ -5653,6 +5887,9 @@ def test_copy_gdg_to_uss_dir(ansible_zos_module): for cp_res in copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None for v_res in verify_dest.contacted.values(): assert v_res.get("rc") == 0 assert len(v_res.get("stdout_lines", [])) > 0 @@ -5689,6 +5926,9 @@ def test_copy_gdg_to_gdg(ansible_zos_module, new_gdg): for cp_res in copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.shell(cmd=f"""drm "{src_data_set}(-1)" """) hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) @@ -5727,6 +5967,9 @@ def test_identical_gdg_copy(ansible_zos_module): for result in copy_results.contacted.values(): assert result.get("msg") is None assert result.get("changed") is True + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None finally: src_gdg_result = hosts.all.shell(cmd=f"dls {src_data_set}.*") src_gdgs = [] @@ -5780,6 +6023,9 @@ def test_copy_gdg_to_gdg_dest_attributes(ansible_zos_module): for cp_res in 
copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None finally: hosts.all.shell(cmd=f"""drm "{src_data_set}(-1)" """) hosts.all.shell(cmd=f"""drm "{src_data_set}(0)" """) @@ -5809,7 +6055,7 @@ def test_backup_gds(ansible_zos_module): src=src_data_set, dest=dest_data_set, remote_src=True, - force=True, + replace=True, backup=True, backup_name=f"{backup_data_set}(+1)", ) @@ -5821,6 +6067,9 @@ def test_backup_gds(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in backup_check.contacted.values(): assert result.get("rc") == 0 @@ -5854,7 +6103,7 @@ def test_backup_gds_invalid_generation(ansible_zos_module): src=src_data_set, dest=dest_data_set, remote_src=True, - force=True, + replace=True, backup=True, backup_name=f"{backup_data_set}(0)", ) @@ -5891,6 +6140,9 @@ def test_copy_to_dataset_with_special_symbols(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=src_data_set, state="absent") @@ -6000,6 +6252,10 @@ def test_copy_data_set_seq_with_aliases(ansible_zos_module, volumes_on_systems): for result in zos_copy_result.contacted.values(): assert result.get('changed') is True assert result.get('failed', False) is False + assert result.get("msg") is None + assert result.get("dest") is not None + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy = hosts.all.shell( cmd="cat 
\"//'{0}'\"".format(dest), executable=SHELL_EXECUTABLE, @@ -6040,6 +6296,9 @@ def test_copy_pds_to_pds_using_dest_alias(ansible_zos_module): for cp_res in copy_results.contacted.values(): assert cp_res.get("msg") is None assert cp_res.get("changed") is True + assert cp_res.get("dest") is not None + assert cp_res.get("dest_created") is not None + assert cp_res.get("src") is not None verify_dest = hosts.all.shell( cmd=f"""dcat "{dest_pds}(MEMBER)" """, @@ -6152,6 +6411,8 @@ def test_copy_pdse_loadlib_to_pdse_loadlib_using_aliases(ansible_zos_module): assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}".format(dest_lib_aliases) + assert result.get("dest_created") is not None + assert result.get("src") is not None verify_copy_mls_aliases = hosts.all.shell( cmd="mls {0}".format(dest_lib), @@ -6214,6 +6475,7 @@ def test_copy_asa_file_to_asa_sequential_with_pound(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -6267,6 +6529,7 @@ def test_copy_seq_data_set_to_seq_asa_with_pounds(ansible_zos_module): assert cp_res.get("changed") is True assert cp_res.get("dest") == dest assert cp_res.get("dest_created") is True + assert cp_res.get("src") is not None for v_cp in verify_copy.contacted.values(): assert v_cp.get("rc") == 0 assert v_cp.get("stdout") == ASA_SAMPLE_RETURN @@ -6373,11 +6636,15 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo assert result.get("msg") is None assert result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib, dest_pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None for result in copy_res_aliases.contacted.values(): assert result.get("msg") is None assert 
result.get("changed") is True assert result.get("dest") == "{0}({1})".format(dest_lib_aliases, dest_pgm_mem) + assert result.get("dest_created") is not None + assert result.get("src") is not None # check ALIAS keyword and name in mls output verify_copy_mls = hosts.all.shell( @@ -6413,4 +6680,3 @@ def test_copy_pds_loadlib_member_to_pds_loadlib_member_with_pound(ansible_zos_mo hosts.all.zos_data_set(name=src_lib, state="absent") hosts.all.zos_data_set(name=dest_lib, state="absent") hosts.all.zos_data_set(name=dest_lib_aliases, state="absent") - \ No newline at end of file diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index 1e3f3faa94..74fbe084ea 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -162,7 +162,7 @@ def test_data_set_catalog_and_uncatalog(ansible_zos_module, jcl, volumes_on_syst hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful @@ -221,7 +221,7 @@ def test_data_set_present_when_uncataloged(ansible_zos_module, jcl, volumes_on_s hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -267,7 +267,7 @@ def test_data_set_replacement_when_uncataloged(ansible_zos_module, jcl, volumes_ hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume, dataset)), 
TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -315,7 +315,7 @@ def test_data_set_absent_when_uncataloged(ansible_zos_module, jcl, volumes_on_sy hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) results = hosts.all.zos_job_submit( - src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30 + src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30 ) # verify data set creation was successful for result in results.contacted.values(): @@ -356,7 +356,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( hosts.all.file(path=TEMP_PATH, state="directory") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_1, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30) # verify data set creation was successful for result in results.contacted.values(): @@ -371,7 +371,7 @@ def test_data_set_absent_when_uncataloged_and_same_name_cataloged_is_present( hosts.all.file(path=TEMP_PATH + "/SAMPLE", state="absent") hosts.all.shell(cmd=ECHO_COMMAND.format(quote(jcl.format(volume_2, dataset)), TEMP_PATH)) - results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", location="uss", wait_time_s=30) + results = hosts.all.zos_job_submit(src=TEMP_PATH + "/SAMPLE", remote_src=True, wait_time=30) # verify data set creation was successful for result in results.contacted.values(): diff --git a/tests/functional/modules/test_zos_encode_func.py b/tests/functional/modules/test_zos_encode_func.py index 8be2107455..89965848c8 100644 --- a/tests/functional/modules/test_zos_encode_func.py +++ 
b/tests/functional/modules/test_zos_encode_func.py @@ -245,6 +245,10 @@ def test_uss_encoding_conversion_uss_file_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is None assert result.get("changed") is True + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -274,6 +278,9 @@ def test_uss_encoding_conversion_uss_file_to_uss_path(ansible_zos_module): assert result.get("dest") == uss_dest_path assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}/{path.basename(uss_file)}") for result in tag_results.contacted.values(): @@ -306,6 +313,9 @@ def test_uss_encoding_conversion_uss_path_to_uss_path(ansible_zos_module): assert result.get("dest") == uss_dest_path assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}") for result in tag_results.contacted.values(): @@ -338,6 +348,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_ps(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: 
hosts.all.file(path=uss_file, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -365,6 +378,9 @@ def test_uss_encoding_conversion_mvs_ps_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -397,6 +413,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=uss_file, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -435,6 +454,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_pds_member(ansible_zos_module): assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=uss_file, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -471,6 +493,9 @@ def test_uss_encoding_conversion_mvs_pds_member_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING tag_results = 
hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -509,6 +534,10 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING + hosts.all.file(path=uss_dest_path, state="directory") results = hosts.all.zos_encode( src=mvs_ps, @@ -524,6 +553,9 @@ def test_uss_encoding_conversion_uss_path_to_mvs_pds(ansible_zos_module): assert result.get("dest") == uss_dest_path assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_path}") for result in tag_results.contacted.values(): @@ -559,6 +591,9 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_pds_member(ansible_zos_module): assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -575,10 +610,11 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) for result in results.contacted.values(): + print(result) assert 
result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True @@ -595,6 +631,9 @@ def test_uss_encoding_conversion_uss_file_to_mvs_vsam(ansible_zos_module): assert result.get("dest") == mvs_vs assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=temp_jcl_path, state="absent") hosts.all.file(path=uss_file, state="absent") @@ -623,6 +662,9 @@ def test_uss_encoding_conversion_mvs_vsam_to_uss_file(ansible_zos_module): assert result.get("dest") == uss_dest_file assert result.get("backup_name") is not None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING tag_results = hosts.all.shell(cmd=f"ls -T {uss_dest_file}") for result in tag_results.contacted.values(): @@ -658,6 +700,10 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_ps(ansible_zos_module): assert result.get("dest") == mvs_ps assert result.get("backup_name") is None assert result.get("changed") is True + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING + hosts.all.zos_data_set(name=mvs_vs, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -691,6 +737,10 @@ def test_uss_encoding_conversion_mvs_vsam_to_mvs_pds_member(ansible_zos_module): assert result.get("dest") == mvs_pds_member assert result.get("backup_name") is None assert result.get("changed") is True + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + 
assert result.get("encoding").get("from") == FROM_ENCODING hosts.all.zos_data_set(name=mvs_vs, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -707,7 +757,7 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) results = hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) for result in results.contacted.values(): assert result.get("jobs") is not None @@ -728,6 +778,10 @@ def test_uss_encoding_conversion_mvs_ps_to_mvs_vsam(ansible_zos_module): assert result.get("dest") == mvs_vs assert result.get("backup_name") is None assert result.get("changed") is True + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING finally: hosts.all.file(path=temp_jcl_path, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -755,6 +809,10 @@ def test_uss_encoding_conversion_src_with_special_chars(ansible_zos_module): assert result.get("backup_name") is None assert result.get("changed") is True assert result.get("msg") is None + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING finally: hosts.all.zos_data_set(name=src_data_set, state="absent") @@ -813,6 +871,10 @@ def test_pds_backup_with_tmp_hlq_option(ansible_zos_module): ) for enc_res in encode_res.contacted.values(): assert enc_res.get("backup_name")[:6] == tmphlq + assert enc_res.get("encoding") is not None + assert isinstance(enc_res.get("encoding"), dict) + assert enc_res.get("encoding").get("to") == FROM_ENCODING + assert enc_res.get("encoding").get("from") == TO_ENCODING 
contents = hosts.all.shell(cmd="cat \"//'{0}(SAMPLE)'\"".format(enc_res.get("backup_name"))) hosts.all.file(path=temp_jcl_path, state="absent") hosts.all.zos_data_set(name=mvs_ps, state="absent") @@ -873,7 +935,7 @@ def test_vsam_backup(ansible_zos_module): cmd=f"echo {quote(KSDS_CREATE_JCL.format(mvs_vs))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) hosts.all.file(path=temp_jcl_path, state="absent") # submit JCL to populate KSDS @@ -882,7 +944,7 @@ def test_vsam_backup(ansible_zos_module): cmd=f"echo {quote(KSDS_REPRO_JCL.format(mvs_vs.upper()))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) hosts.all.zos_encode( @@ -958,6 +1020,10 @@ def test_uss_backup_entire_folder_to_default_backup_location(ansible_zos_module) backup_name = None for result in results.contacted.values(): backup_name = result.get("backup_name") + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING assert backup_name contents = hosts.all.shell(cmd=f"cat {backup_name}file1") content1 = "" @@ -1026,6 +1092,10 @@ def test_uss_backup_entire_folder_to_default_backup_location_compressed( backup_name = None for result in results.contacted.values(): backup_name = result.get("backup_name") + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == FROM_ENCODING + assert result.get("encoding").get("from") == TO_ENCODING results = hosts.all.shell(cmd=f"ls -la {backup_name[:-4]}*") for result in results.contacted.values(): @@ -1058,6 +1128,10 @@ def 
test_return_backup_name_on_module_success_and_failure(ansible_zos_module): for content in enc_ds.contacted.values(): assert content.get("backup_name") is not None assert content.get("backup_name") == backup_data_set + assert content.get("encoding") is not None + assert isinstance(content.get("encoding"), dict) + assert content.get("encoding").get("to") == TO_ENCODING + assert content.get("encoding").get("from") == FROM_ENCODING hosts.all.zos_data_set(name=backup_data_set, state="absent") enc_ds = hosts.all.zos_encode( @@ -1074,6 +1148,10 @@ def test_return_backup_name_on_module_success_and_failure(ansible_zos_module): assert content.get("msg") is not None assert content.get("backup_name") is not None assert content.get("backup_name") == backup_data_set + assert content.get("encoding") is not None + assert isinstance(content.get("encoding"), dict) + assert content.get("encoding").get("to") == TO_ENCODING + assert content.get("encoding").get("from") == INVALID_ENCODING finally: hosts.all.zos_data_set(name=mvs_ps, state="absent") hosts.all.zos_data_set(name=backup_data_set, state="absent") @@ -1101,6 +1179,10 @@ def test_gdg_encoding_conversion_src_with_invalid_generation(ansible_zos_module, assert "not cataloged" in result.get("msg") assert result.get("backup_name") is None assert result.get("changed") is False + assert result.get("encoding") is not None + assert isinstance(result.get("encoding"), dict) + assert result.get("encoding").get("to") == TO_ENCODING + assert result.get("encoding").get("from") == FROM_ENCODING finally: hosts.all.shell(cmd=f"""drm "{ds_name}(0)" """) hosts.all.shell(cmd=f"drm {ds_name}") diff --git a/tests/functional/modules/test_zos_fetch_func.py b/tests/functional/modules/test_zos_fetch_func.py index b2f7a65fe1..5a29f7d709 100644 --- a/tests/functional/modules/test_zos_fetch_func.py +++ b/tests/functional/modules/test_zos_fetch_func.py @@ -182,6 +182,13 @@ def test_fetch_uss_file_not_present_on_local_machine(ansible_zos_module): assert 
result.get("data_set_type") == "USS" assert result.get("module_stderr") is None assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert "stdout" in result.keys() + assert "stdout_lines" in result.keys() + assert "stderr" in result.keys() + assert "stderr_lines" in result.keys() + assert "rc" is not None + assert isinstance(result.get("encoding"), dict) finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -191,8 +198,9 @@ def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module): with open("/tmp/profile", "w",encoding="utf-8") as file: file.close() hosts = ansible_zos_module + src = "/etc/profile" params = { - "src":"/etc/profile", + "src": src, "dest":"/tmp/", "flat":True } @@ -206,14 +214,17 @@ def test_fetch_uss_file_replace_on_local_machine(ansible_zos_module): assert result.get("checksum") != local_checksum assert result.get("module_stderr") is None assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: os.remove(dest_path) def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): hosts = ansible_zos_module + src = "/etc/profile" params = { - "src":"/etc/profile", + "src": src, "dest": "/tmp/", "flat":True } @@ -227,6 +238,8 @@ def test_fetch_uss_file_present_on_local_machine(ansible_zos_module): assert result.get("changed") is False assert result.get("checksum") == local_checksum assert result.get("module_stderr") is None + assert "msg" in result.keys() + assert result.get("src") is not None finally: os.remove(dest_path) @@ -256,6 +269,8 @@ def test_fetch_sequential_data_set_fixed_block(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -280,6 +295,8 @@ def 
test_fetch_sequential_data_set_variable_block(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -308,6 +325,8 @@ def test_fetch_partitioned_data_set(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) assert os.path.isdir(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): @@ -329,7 +348,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): cmd=f"echo {quote(KSDS_CREATE_JCL.format(volume_1, test_vsam))} > {temp_jcl_path}/SAMPLE" ) hosts.all.zos_job_submit( - src=f"{temp_jcl_path}/SAMPLE", location="uss", wait_time_s=30 + src=f"{temp_jcl_path}/SAMPLE", remote_src=True, wait_time=30 ) hosts.all.shell(cmd=f"echo \"{TEST_DATA}\\c\" > {uss_file}") hosts.all.zos_encode( @@ -345,7 +364,7 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): "src":test_vsam, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): @@ -357,6 +376,8 @@ def test_fetch_vsam_data_set(ansible_zos_module, volumes_on_systems): file = open(dest_path, 'r',encoding="utf-8") read_file = file.read() assert read_file == TEST_DATA + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): @@ -384,6 +405,8 @@ def test_fetch_vsam_empty_data_set(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=src_ds, state="absent") if os.path.exists(dest_path): @@ -401,7 
+424,7 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): "src":TEST_PDS_MEMBER, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } dest_path = "/tmp/" + extract_member_name(TEST_PDS_MEMBER) try: @@ -411,9 +434,11 @@ def test_fetch_partitioned_data_set_member_in_binary_mode(ansible_zos_module): assert result.get("data_set_type") == "Partitioned" assert result.get("module_stderr") is None assert result.get("dest") == dest_path - assert result.get("is_binary") is True + assert result.get("binary") is True assert os.path.exists(dest_path) assert os.path.isfile(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): @@ -435,7 +460,7 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): "src":TEST_PS, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } dest_path = "/tmp/" + TEST_PS try: @@ -444,8 +469,10 @@ def test_fetch_sequential_data_set_in_binary_mode(ansible_zos_module): assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None - assert result.get("is_binary") is True + assert result.get("binary") is True assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -463,7 +490,7 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): "src":TEST_PDS, "dest":"/tmp/", "flat":True, - "is_binary":True + "binary":True } dest_path = "/tmp/" + TEST_PDS try: @@ -472,9 +499,11 @@ def test_fetch_partitioned_data_set_binary_mode(ansible_zos_module): assert result.get("changed") is True assert result.get("data_set_type") == "Partitioned" assert result.get("module_stderr") is None - assert result.get("is_binary") is True + assert result.get("binary") is True 
assert os.path.exists(dest_path) assert os.path.isdir(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PDS, state="absent") if os.path.exists(dest_path): @@ -500,6 +529,8 @@ def test_fetch_sequential_data_set_empty(ansible_zos_module): assert result.get("dest") == dest_path assert os.path.exists(dest_path) assert os.stat(dest_path).st_size == 0 + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -558,6 +589,8 @@ def test_fetch_partitioned_data_set_member_empty(ansible_zos_module): assert result.get("module_stderr") is None assert os.path.exists(dest_path) assert os.stat(dest_path).st_size == 0 + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -576,7 +609,7 @@ def test_fetch_missing_uss_file_does_not_fail(ansible_zos_module): results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is False - assert "note" in result.keys() + assert "msg" in result.keys() assert result.get("module_stderr") is None except Exception: raise @@ -610,7 +643,7 @@ def test_fetch_missing_mvs_data_set_does_not_fail(ansible_zos_module): results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert result.get("changed") is False - assert "note" in result.keys() + assert "msg" in result.keys() assert result.get("module_stderr") is None assert not os.path.exists("/tmp/FETCH.TEST.DATA.SET") except Exception: @@ -679,6 +712,7 @@ def test_fetch_sequential_data_set_replace_on_local_machine(ansible_zos_module): assert result.get("changed") is True assert result.get("module_stderr") is None assert checksum(dest_path, hash_func=sha256) != local_checksum + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ 
-718,6 +752,7 @@ def test_fetch_partitioned_data_set_replace_on_local_machine(ansible_zos_module) assert result.get("changed") is True assert result.get("module_stderr") is None assert os.path.getmtime(dest_path) != prev_timestamp + assert result.get("src") is not None finally: if os.path.exists(dest_path): shutil.rmtree(dest_path) @@ -736,6 +771,7 @@ def test_fetch_uss_file_insufficient_write_permission_fails(ansible_zos_module): results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): assert "msg" in result.keys() + assert result.get("src") is not None dest_path.close() @@ -776,11 +812,12 @@ def test_fetch_use_data_set_qualifier(ansible_zos_module): try: results = hosts.all.zos_fetch(**params) for result in results.contacted.values(): - print(result) assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert result.get("module_stderr") is None assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: if os.path.exists(dest_path): os.remove(dest_path) @@ -807,6 +844,8 @@ def test_fetch_flat_create_dirs(ansible_zos_module, z_python_interpreter): for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None + assert "msg" in result.keys() + assert result.get("src") is not None assert os.path.exists(dest_path) finally: if os.path.exists(dest_path): @@ -836,6 +875,8 @@ def test_fetch_sequential_data_set_with_special_chars(ansible_zos_module): assert result.get("module_stderr") is None assert result.get("dest") == dest_path assert os.path.exists(dest_path) + assert "msg" in result.keys() + assert result.get("src") is not None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") if os.path.exists(dest_path): @@ -862,6 +903,8 @@ def test_fetch_gds_from_gdg(ansible_zos_module, generation): assert result.get("changed") is True assert result.get("data_set_type") == "Sequential" assert 
result.get("module_stderr") is None + assert "msg" in result.keys() + assert result.get("src") is not None # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. dest_path = result.get("dest", "") @@ -894,6 +937,7 @@ def test_error_fetch_inexistent_gds(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("failed") is True + assert "msg" in result.keys() assert "does not exist" in result.get("msg", "") finally: @@ -920,6 +964,7 @@ def test_fetch_gdg(ansible_zos_module): assert result.get("changed") is True assert result.get("data_set_type") == "Generation Data Group" assert result.get("module_stderr") is None + assert "msg" in result.keys() # Checking that we got a dest of the form: ANSIBLE.DATA.SET.G0001V01. dest_path = result.get("dest", "") @@ -981,6 +1026,7 @@ def test_fetch_uss_file_relative_path_not_present_on_local_machine(ansible_zos_m assert result.get("module_stderr") is None assert dest == result.get("dest") dest = result.get("dest") + assert "msg" in result.keys() finally: if os.path.exists(dest): diff --git a/tests/functional/modules/test_zos_find_func.py b/tests/functional/modules/test_zos_find_func.py index 76c3071929..07075cd707 100644 --- a/tests/functional/modules/test_zos_find_func.py +++ b/tests/functional/modules/test_zos_find_func.py @@ -102,6 +102,7 @@ def test_find_gdg_data_sets(ansible_zos_module): assert len(val.get('data_sets')) == 1 assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.*.*'], @@ -114,6 +115,7 @@ def test_find_gdg_data_sets(ansible_zos_module): assert len(val.get('data_sets')) == 1 assert {"name":gdg_b, "type": "GDG"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None find_res = hosts.all.zos_find( 
patterns=[f'{TEST_SUITE_HLQ}.*.*'], @@ -127,6 +129,7 @@ def test_find_gdg_data_sets(ansible_zos_module): assert len(val.get('data_sets')) == 1 assert {"name":gdg_c, "type": "GDG"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None finally: # Remove one by one to avoid using an HLQ.* cuz it could cause bugs when running in parallel. @@ -160,6 +163,7 @@ def test_find_sequential_data_sets_containing_single_string(ansible_zos_module): for ds in val.get('data_sets'): assert ds.get('name') in SEQ_NAMES assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None finally: hosts.all.zos_data_set( batch=[ @@ -199,6 +203,8 @@ def test_find_sequential_data_sets_multiple_patterns(ansible_zos_module): for ds in val.get('data_sets'): assert ds.get('name') in SEQ_NAMES assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -241,15 +247,17 @@ def test_find_pds_members_containing_string(ansible_zos_module): result = hosts.all.shell(cmd=f"decho '{search_string}' \"{ds}(MEMBER)\" ") find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], contains=search_string, - patterns=['.*'] + patterns=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'] ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): assert ds.get('name') in PDS_NAMES assert len(ds.get('members')) == 1 + assert val.get('matched') is not None + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -282,6 +290,9 @@ def test_exclude_data_sets_from_matched_list(ansible_zos_module): assert len(val.get('data_sets')) == 2 for ds in val.get('data_sets'): assert ds.get('name') in SEQ_NAMES + assert val.get('matched') is not None + assert val.get('examined') is not None + assert 
val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -322,14 +333,16 @@ def test_exclude_members_from_matched_list(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], - excludes=['.*FILE$'], - patterns=['.*'] + excludes=['(.*FILE$)'], + patterns=[f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'] ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 for ds in val.get('data_sets'): assert len(ds.get('members')) == 1 + assert val.get('matched') is not None + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -350,6 +363,8 @@ def test_find_data_sets_older_than_age(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == 2 + assert val.get('examined') is not None + assert val.get('msg') is None @pytest.mark.parametrize("ds_type", DATASET_TYPES) @@ -364,6 +379,8 @@ def test_find_data_sets_larger_than_size(ansible_zos_module, ds_type): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == 2 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set(name=TEST_PS1, state="absent") hosts.all.zos_data_set(name=TEST_PS2, state="absent") @@ -378,6 +395,8 @@ def test_find_data_sets_smaller_than_size(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set(name=TEST_PS, state="absent") @@ -396,6 +415,8 @@ def test_find_data_sets_in_volume(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) >= 1 assert val.get('matched') >= 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: 
hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -419,6 +440,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) assert val.get('data_sets')[0].get("name", None) == VSAM_NAMES[0] + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -428,6 +451,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) assert val.get('data_sets')[0].get("name", None) == f"{VSAM_NAMES[0]}.DATA" + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -436,6 +461,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 2 assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -445,6 +472,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) assert val.get('data_sets')[0].get("name", None) == f"{VSAM_NAMES[0]}.INDEX" + assert val.get('examined') is not None + assert val.get('msg') is None find_res = hosts.all.zos_find( patterns=[f'{TEST_SUITE_HLQ}.FIND.VSAM.FUNCTEST.*'], @@ -454,6 +483,8 @@ def test_find_vsam_pattern(ansible_zos_module, volumes_on_systems): assert len(val.get('data_sets')) == 3 assert val.get('matched') == len(val.get('data_sets')) assert val.get('examined') == 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ 
-487,6 +518,8 @@ def test_find_vsam_pattern_disp_old(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.shell(cmd=f"drm '{jcl_ds}'") hosts.all.zos_data_set( @@ -516,6 +549,8 @@ def test_find_vsam_in_volume(ansible_zos_module, volumes_on_systems): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 1 assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -533,6 +568,7 @@ def test_find_invalid_age_indicator_fails(ansible_zos_module): find_res = hosts.all.zos_find(patterns=['some.pattern'], age='3s') for val in find_res.contacted.values(): assert val.get('msg') is not None + assert val.get('changed') is False def test_find_invalid_size_indicator_fails(ansible_zos_module): @@ -540,6 +576,7 @@ def test_find_invalid_size_indicator_fails(ansible_zos_module): find_res = hosts.all.zos_find(patterns=['some.pattern'], size='5h') for val in find_res.contacted.values(): assert val.get('msg') is not None + assert val.get('changed') is False def test_find_non_existent_data_sets(ansible_zos_module): @@ -548,17 +585,20 @@ def test_find_non_existent_data_sets(ansible_zos_module): for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_non_existent_data_set_members(ansible_zos_module): hosts = ansible_zos_module find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.NONE.PDS.*'], - patterns=['.*'] + patterns=[f'{TEST_SUITE_HLQ}.NONE.PDS.*'], ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 0 assert val.get('matched') == 0 + assert val.get('examined') is not 
None + assert val.get('msg') is None def test_find_mixed_members_from_pds_paths(ansible_zos_module): @@ -590,14 +630,15 @@ def test_find_mixed_members_from_pds_paths(ansible_zos_module): ] ) find_res = hosts.all.zos_find( - pds_paths=[f'{TEST_SUITE_HLQ}.NONE.PDS.*',f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], - excludes=['.*FILE$'], - patterns=['.*'] + excludes=['(.*FILE$)'], + patterns=[f'{TEST_SUITE_HLQ}.NONE.PDS.*',f'{TEST_SUITE_HLQ}.FIND.PDS.FUNCTEST.*'], ) for val in find_res.contacted.values(): assert len(val.get('data_sets')) == 3 for ds in val.get('data_sets'): assert len(ds.get('members')) == 1 + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -631,6 +672,8 @@ def test_find_sequential_special_data_sets_containing_single_string(ansible_zos_ for ds in special_names: assert {"name":ds, "type": "NONVSAM"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: for ds in special_names: hosts.all.shell(cmd=f"drm '{ds}'") @@ -653,10 +696,12 @@ def test_find_vsam_and_gdg_data_sets(ansible_zos_module, volumes_on_systems): resource_type=['cluster', 'gdg'] ) for val in find_res.contacted.values(): - assert len(val.get('data_sets')) == 2 + assert len(val.get('data_sets')) >= 2 assert val.get('matched') == len(val.get('data_sets')) assert {"name":VSAM_NAMES[0], "type": "CLUSTER"} in val.get('data_sets') assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') + assert val.get('examined') is not None + assert val.get('msg') is None finally: hosts.all.zos_data_set( batch=[ @@ -689,11 +734,15 @@ def test_find_gdg_and_nonvsam_data_sets(ansible_zos_module): patterns=[f'{TEST_SUITE_HLQ}.*.*'], resource_type=["gdg", "nonvsam"], ) + data_sets = [{"name":data_set_name, "type": "NONVSAM"} for data_set_name in SEQ_NAMES] + data_sets.append({"name":gdg_b, "type": "GDG"}) for val in find_res.contacted.values(): 
assert val.get('msg') is None - assert len(val.get('data_sets')) == 4 - assert {"name":gdg_b, "type": "GDG"} in val.get('data_sets') + assert len(val.get('data_sets')) >= 4 + for data_set in data_sets: + assert data_set in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None finally: # Remove GDG. hosts.all.shell(cmd=f"drm {gdg_b}") @@ -732,9 +781,11 @@ def test_find_vsam_and_nonvsam_data_sets(ansible_zos_module, volumes_on_systems) ) for val in find_res.contacted.values(): assert val.get('msg') is None - assert len(val.get('data_sets')) == 4 + assert len(val.get('data_sets')) >= 4 assert {"name":f'{VSAM_NAMES[0]}.DATA', "type": "DATA"} in val.get('data_sets') assert val.get('matched') == len(val.get('data_sets')) + assert val.get('examined') is not None + assert val.get('msg') is None finally: # Remove VSAM. hosts.all.zos_data_set( @@ -766,6 +817,8 @@ def test_find_migrated_data_sets(ansible_zos_module): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): assert ds.get("type") == "MIGRATED" + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_migrated_data_sets_with_excludes(ansible_zos_module): @@ -779,6 +832,8 @@ def test_find_migrated_data_sets_with_excludes(ansible_zos_module): assert len(val.get('data_sets')) != 0 for ds in val.get('data_sets'): assert not re.fullmatch(r".*F4", ds.get("name")) + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_migrated_data_sets_with_migrated_type(ansible_zos_module): @@ -793,6 +848,8 @@ def test_find_migrated_data_sets_with_migrated_type(ansible_zos_module): for ds in val.get('data_sets'): assert ds.get("type") == "MIGRATED" assert ds.get("migrated_resource_type") == "NONVSAM" + assert val.get('examined') is not None + assert val.get('msg') is None def test_find_migrated_and_gdg_data_sets(ansible_zos_module): @@ -812,6 +869,8 @@ def 
test_find_migrated_and_gdg_data_sets(ansible_zos_module): assert {"name":gdg_a, "type": "GDG"} in val.get('data_sets') for ds in val.get('data_sets'): assert ds.get("type") in ["MIGRATED", "GDG"] + assert val.get('examined') is not None + assert val.get('msg') is None finally: # Remove GDG. hosts.all.shell(cmd=f"drm {gdg_a}") diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index d9799af81b..ae4930bc3e 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -16,6 +16,7 @@ __metaclass__ = type from shellescape import quote +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name JCL_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, @@ -50,6 +51,9 @@ def test_zos_job_output_no_job_id(ansible_zos_module): results = hosts.all.zos_job_output(job_id="") for result in results.contacted.values(): assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("msg") is not None + assert result.get("failed") is True assert result.get("jobs") is None @@ -59,7 +63,9 @@ def test_zos_job_output_invalid_job_id(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("stderr") is not None + assert result.get("msg") is not None assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_no_job_name(ansible_zos_module): @@ -67,6 +73,9 @@ def test_zos_job_output_no_job_name(ansible_zos_module): results = hosts.all.zos_job_output(job_name="") for result in results.contacted.values(): assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("msg") is not None + assert result.get("failed") is True assert result.get("jobs") is None @@ -75,7 +84,46 @@ def test_zos_job_output_invalid_job_name(ansible_zos_module): results = 
hosts.all.zos_job_output(job_name="INVALID") for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("subsystem") is None + assert job.get("system") is None + assert job.get("owner") is not None + assert job.get("cpu_time") is None + assert job.get("execution_node") is None + assert job.get("origin_node") is None + assert job.get("content_type") is None + assert job.get("creation_date") is None + assert job.get("creation_time") is None + assert job.get("execution_time") is None + assert job.get("job_class") is None + assert job.get("svc_class") is None + assert job.get("priority") is None + assert job.get("asid") is None + assert job.get("queue_position") is None + assert job.get("program_name") is None + assert job.get("class") is None + assert job.get("steps") is not None + assert job.get("dds") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is None + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") == "unavailable" + assert dds.get("record_count") == 0 + assert dds.get("id") is None + assert dds.get("stepname") is None + assert dds.get("procstep") is None + assert dds.get("byte_count") == 0 + assert dds.get("content") is None def test_zos_job_output_no_owner(ansible_zos_module): @@ -84,14 +132,20 @@ def test_zos_job_output_no_owner(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("msg") is not None + assert result.get("stderr") is not None + assert result.get("failed") is True + assert result.get("jobs") is None def 
test_zos_job_output_invalid_owner(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(owner="INVALID") for result in results.contacted.values(): - assert result.get("failed") is True + assert result.get("changed") is False assert result.get("stderr") is not None + assert result.get("msg") is not None + assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_reject(ansible_zos_module): @@ -100,6 +154,9 @@ def test_zos_job_output_reject(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") is False assert result.get("msg") is not None + assert result.get("stderr") is not None + assert result.get("failed") is True + assert result.get("jobs") is None def test_zos_job_output_job_exists(ansible_zos_module): @@ -112,34 +169,101 @@ def test_zos_job_output_job_exists(ansible_zos_module): ) jobs = hosts.all.zos_job_submit( - src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", remote_src=True, volume=None ) for job in jobs.contacted.values(): - print(job) + assert job.get("changed") is True + assert job.get("msg", False) is False assert job.get("jobs") is not None - for job in jobs.contacted.values(): - submitted_job_id = job.get("jobs")[0].get("job_id") - assert submitted_job_id is not None + job_ = job.get("jobs")[0] + assert job_.get("job_id") is not None + submitted_job_id = job_.get("job_id") + assert job_.get("job_name") is not None + assert job_.get("content_type") is not None + assert job_.get("duration") is not None + assert job_.get("execution_time") is not None + assert job_.get("job_class") is not None + assert job_.get("svc_class") is None + assert job_.get("priority") is not None + assert job_.get("asid") is not None + assert job_.get("creation_date") is not None + assert job_.get("creation_time") is not None + assert job_.get("queue_position") is not None + assert job_.get("program_name") is not None + + dds = 
job_.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job_.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job_.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" results = hosts.all.zos_job_output(job_id=submitted_job_id) # was SAMPLE?! for result in results.contacted.values(): - assert result.get("changed") is False + assert result.get("changed") is True + assert result.get("msg", False) is False assert result.get("jobs") is not None - assert result.get("jobs")[0].get("ret_code").get("steps") is not None - assert result.get("jobs")[0].get("ret_code").get("steps")[0].get("step_name") == "STEP0001" - assert result.get("jobs")[0].get("content_type") == "JOB" - assert result.get("jobs")[0].get("execution_time") is not None - assert "system" in result.get("jobs")[0] - assert "subsystem" in result.get("jobs")[0] - assert "cpu_time" in result.get("jobs")[0] - assert "execution_node" in result.get("jobs")[0] - assert "origin_node" in result.get("jobs")[0] + + job = result.get("jobs")[0] + assert job.get("job_id") == submitted_job_id + assert job.get("job_name") is not None + assert job.get("subsystem") is not None + assert job.get("system") is not None + assert job.get("owner") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("origin_node") is not None + assert job.get("content_type") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is 
None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + assert job.get("class") is not None + assert job.get("steps") is not None + assert job.get("dds") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + finally: hosts.all.file(path=TEMP_PATH, state="absent") -def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): +def test_zos_job_output_job_exists_with_filtered_dd_name(ansible_zos_module): try: hosts = ansible_zos_module hosts.all.file(path=TEMP_PATH, state="directory") @@ -147,17 +271,55 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): cmd=f"echo {quote(JCL_FILE_CONTENTS)} > {TEMP_PATH}/SAMPLE" ) result = hosts.all.zos_job_submit( - src=f"{TEMP_PATH}/SAMPLE", location="uss", volume=None + src=f"{TEMP_PATH}/SAMPLE", remote_src=True, volume=None ) hosts.all.file(path=TEMP_PATH, state="absent") dd_name = "JESMSGLG" - results = hosts.all.zos_job_output(job_name="HELLO", ddname=dd_name) + results = hosts.all.zos_job_output(job_name="HELLO", dd_name=dd_name) for result in results.contacted.values(): - assert result.get("changed") is False + assert result.get("changed") is True + assert result.get("msg", False) is False assert result.get("jobs") is not None - for job in result.get("jobs"): - assert len(job.get("ddnames")) == 1 - assert 
job.get("ddnames")[0].get("ddname") == dd_name + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("subsystem") is not None + assert job.get("system") is not None + assert job.get("owner") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("origin_node") is not None + assert job.get("content_type") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + assert job.get("class") is not None + assert job.get("steps") is not None + assert job.get("dds") is not None + assert len(job.get("dds")) == 1 + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" + + dds = job.get("dds")[0] + assert dds.get("dd_name") == dd_name + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + finally: hosts.all.file(path=TEMP_PATH, state="absent") @@ -200,4 +362,44 @@ def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_txt") is not None + assert result.get("changed") is False + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not 
None + assert job.get("job_name") is not None + assert job.get("subsystem") is None + assert job.get("system") is None + assert job.get("owner") is not None + assert job.get("cpu_time") is None + assert job.get("execution_node") is None + assert job.get("origin_node") is None + assert job.get("content_type") is None + assert job.get("creation_date") is None + assert job.get("creation_time") is None + assert job.get("execution_time") is None + assert job.get("job_class") is None + assert job.get("svc_class") is None + assert job.get("priority") is None + assert job.get("asid") is None + assert job.get("queue_position") is None + assert job.get("program_name") is None + assert job.get("class") is None + assert job.get("steps") is not None + assert job.get("dds") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is None + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") == "unavailable" + assert dds.get("record_count") == 0 + assert dds.get("id") is None + assert dds.get("stepname") is None + assert dds.get("procstep") is None + assert dds.get("byte_count") == 0 + assert dds.get("content") is None diff --git a/tests/functional/modules/test_zos_job_query_func.py b/tests/functional/modules/test_zos_job_query_func.py index 6f920cfea4..f21cd2c464 100644 --- a/tests/functional/modules/test_zos_job_query_func.py +++ b/tests/functional/modules/test_zos_job_query_func.py @@ -49,8 +49,36 @@ def test_zos_job_query_func(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_query(job_name="*", owner="*") for result in results.contacted.values(): - assert result.get("changed") is False + assert result.get("changed") is True assert result.get("jobs") is not None + assert result.get("msg", False) is False + + job = result.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert 
job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("execution_time") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("code") is not None + assert rc.get("msg_code") is not None + assert rc.get("msg_txt") is not None + JCLQ_FILE_CONTENTS = """//HELLO JOB (T043JM,JM00,1,0,0,0),'HELLO WORLD - JRM',CLASS=R, // MSGCLASS=X,MSGLEVEL=1,NOTIFY=S0JM @@ -70,78 +98,188 @@ def test_zos_job_query_func(ansible_zos_module): def test_zos_job_id_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module - jdata_set_name = get_tmp_ds_name() + data_set_name = get_tmp_ds_name() temp_path = get_random_file_name(dir=TEMP_PATH) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {temp_path}/SAMPLE" ) - hosts.all.zos_data_set( - name=jdata_set_name, state="present", type="pds", replace=True - ) + hosts.all.shell(cmd=f"dtouch -tpds '{data_set_name}'") hosts.all.shell( - cmd=f"cp {temp_path}/SAMPLE \"//'{jdata_set_name}(SAMPLE)'\"" + cmd=f"cp {temp_path}/SAMPLE \"//'{data_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - src=f"{jdata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 + src=f"{data_set_name}(SAMPLE)", remote_src=True, wait_time=10 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert 
result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("execution_time") is not None + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None - fulljobid = result.get("jobs")[0].get("job_id") + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" + + fulljobid = job.get("job_id") jobmask = fulljobid[0:3] + '*' + fulljobid[5:6] + '*' qresults = hosts.all.zos_job_query(job_id=jobmask) for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None - assert qresult.get("jobs")[0].get("execution_time") is not None - assert qresult.get("jobs")[0].get("system") is not None - assert qresult.get("jobs")[0].get("subsystem") is not None - assert "cpu_time" in result.get("jobs")[0] - assert "execution_node" in result.get("jobs")[0] - 
assert "origin_node" in result.get("jobs")[0] + assert qresult.get("msg", False) is False + + job = qresult.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") == fulljobid + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("execution_time") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") - hosts.all.zos_data_set(name=jdata_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{data_set_name}'") # test to show multi wildcard in Job_name query won't crash the search def test_zos_job_name_query_multi_wildcards_func(ansible_zos_module): try: hosts = ansible_zos_module - ndata_set_name = get_tmp_ds_name() + data_set_name = get_tmp_ds_name() temp_path = get_random_file_name(dir=TEMP_PATH) hosts.all.file(path=temp_path, state="directory") hosts.all.shell( cmd=f"echo {quote(JCLQ_FILE_CONTENTS)} > {temp_path}/SAMPLE" ) - hosts.all.zos_data_set( - name=ndata_set_name, state="present", type="pds", replace=True - ) + hosts.all.shell(cmd=f"dtouch -tpds '{data_set_name}'") hosts.all.shell( - cmd=f"cp {temp_path}/SAMPLE \"//'{ndata_set_name}(SAMPLE)'\"" + cmd=f"cp {temp_path}/SAMPLE \"//'{data_set_name}(SAMPLE)'\"" ) results = hosts.all.zos_job_submit( - 
src=f"{ndata_set_name}(SAMPLE)", location="data_set", wait_time_s=10 + src=f"{data_set_name}(SAMPLE)", remote_src=True, wait_time=10 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("execution_time") is not None - assert result.get("jobs")[0].get("system") is not None - assert result.get("jobs")[0].get("subsystem") is not None + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" jobname = "HE*L*" qresults = hosts.all.zos_job_query(job_name=jobname, owner="*") for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None - assert 
qresult.get("jobs")[0].get("execution_time") is not None + assert qresult.get("msg", False) is False + job = qresult.get("jobs")[0] + assert job.get("job_name") == "HELLO" + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("execution_time") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") - hosts.all.zos_data_set(name=ndata_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{data_set_name}'") def test_zos_job_id_query_short_ids_func(ansible_zos_module): @@ -150,7 +288,34 @@ def test_zos_job_id_query_short_ids_func(ansible_zos_module): job_id = get_job_id(hosts, len_id) qresults = hosts.all.zos_job_query(job_id=job_id) for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None + assert qresult.get("msg", False) is False + + job = qresult.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert 
job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): @@ -167,5 +332,31 @@ def test_zos_job_id_query_short_ids_with_wilcard_func(ansible_zos_module): content_type = "JOB" for qresult in qresults.contacted.values(): + assert qresult.get("changed") is True assert qresult.get("jobs") is not None - assert qresult.get("jobs")[0].get("content_type") == content_type + assert qresult.get("msg", False) is False + + job = qresult.get("jobs")[0] + assert job.get("job_name") is not None + assert job.get("owner") is not None + assert job.get("job_id") is not None + assert job.get("content_type") == content_type + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("execution_node") is not None + assert job.get("cpu_time") is not None + assert job.get("job_class") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("program_name") is not None + assert job.get("svc_class") is None + assert job.get("steps") is not None + + rc = job.get("ret_code") + assert rc.get("msg") is not None + assert rc.get("msg_code") == "0000" + assert rc.get("code") == 0 + assert rc.get("msg_txt") == "CC" diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index 
e5f8fc7d5a..cd1bcf948e 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -416,7 +416,7 @@ - name: Submit async job. ibm.ibm_zos_core.zos_job_submit: src: {3} - location: local + remote_src: false async: 45 poll: 0 register: job_task @@ -475,22 +475,56 @@ def test_job_submit_pds(ansible_zos_module, location): ) if bool(location.get("default_location")): results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), wait_time_s=30 + src="{0}(SAMPLE)".format(data_set_name), remote_src=True, wait_time=30 ) else: results = hosts.all.zos_job_submit( - src="{0}(SAMPLE)".format(data_set_name), location="data_set", wait_time_s=30 + src="{0}(SAMPLE)".format(data_set_name), remote_src=True, wait_time=30 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True - assert "system" in result.get("jobs")[0] - assert "subsystem" in result.get("jobs")[0] - assert "cpu_time" in result.get("jobs")[0] - assert "execution_node" in result.get("jobs")[0] - assert "origin_node" in result.get("jobs")[0] + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + 
assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -515,12 +549,51 @@ def test_job_submit_pds_special_characters(ansible_zos_module): ) results = hosts.all.zos_job_submit( src="{0}(SAMPLE)".format(data_set_name_special_chars), - location="data_set", + remote_src=True, ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") 
is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name_special_chars, state="absent") @@ -535,13 +608,51 @@ def test_job_submit_uss(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) results = hosts.all.zos_job_submit( - src=f"{temp_path}/SAMPLE", location="uss", volume=None + src=f"{temp_path}/SAMPLE", remote_src=True, volume=None ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get("jobs")[0].get("content_type") == "JOB" assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None 
+ assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -555,35 +666,41 @@ def test_job_submit_and_forget_uss(ansible_zos_module): cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), temp_path) ) results = hosts.all.zos_job_submit( - src=f"{temp_path}/SAMPLE", location="uss", volume=None, wait_time_s=0, + src=f"{temp_path}/SAMPLE", remote_src=True, volume=None, wait_time=0, ) for result in results.contacted.values(): - assert result.get("job_id") is not None assert result.get("changed") is True - assert len(result.get("jobs")) == 0 - assert result.get("job_name") is None - assert result.get("duration") is None - assert result.get("execution_time") is None - assert result.get("ddnames") is not None - assert result.get("ddnames").get("ddname") is None - assert result.get("ddnames").get("record_count") is None - assert result.get("ddnames").get("id") is None - assert result.get("ddnames").get("stepname") is None - assert result.get("ddnames").get("procstep") is None - assert result.get("ddnames").get("byte_count") is None - assert len(result.get("ddnames").get("content")) == 0 - assert 
result.get("ret_code") is not None - assert result.get("ret_code").get("msg") is None - assert result.get("ret_code").get("msg_code") is None - assert result.get("ret_code").get("code") is None - assert len(result.get("ret_code").get("steps")) == 0 - assert result.get("job_class") is None - assert result.get("svc_class") is None - assert result.get("priority") is None - assert result.get("asid") is None - assert result.get("creation_time") is None - assert result.get("queue_position") is None - assert result.get("program_name") is None + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is None + assert job.get("content_type") is None + assert job.get("duration") is None + assert job.get("execution_time") is None + assert job.get("job_class") is None + assert job.get("svc_class") is None + assert job.get("system") is None + assert job.get("subsystem") is None + assert job.get("origin_node") is None + assert job.get("cpu_time") is None + assert job.get("execution_node") is None + assert job.get("priority") is None + assert job.get("asid") is None + assert job.get("creation_date") is None + assert job.get("creation_time") is None + assert job.get("queue_position") is None + assert job.get("program_name") is None + assert job.get("dds") is not None + assert len(job.get("dds")) == 0 + assert job.get("steps") is not None + assert len(job.get("steps")) == 0 + rc = job.get("ret_code") + assert rc.get("msg") is None + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is None finally: hosts.all.file(path=temp_path, state="absent") @@ -593,13 +710,51 @@ def test_job_submit_local(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) + results = 
hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False, wait_time=10) for result in results.contacted.values(): - print(result) - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" def test_job_submit_local_extra_r(ansible_zos_module): @@ -607,12 +762,51 @@ def test_job_submit_local_extra_r(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_BACKSLASH_R) hosts = 
ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False, wait_time=10) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" def test_job_submit_local_badjcl(ansible_zos_module): @@ -620,11 +814,12 @@ def 
test_job_submit_local_badjcl(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_BAD) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local", wait_time_s=10) + results = hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False, wait_time=10) for result in results.contacted.values(): - # Expecting: The job completion code (CC) was not in the job log....." assert result.get("changed") is False + assert result.get("msg") is not None + assert result.get("failed") is True assert re.search(r'completion code', repr(result.get("msg"))) @@ -655,13 +850,52 @@ def test_job_submit_pds_volume(ansible_zos_module, volumes_on_systems): results = hosts.all.zos_job_submit( src=data_set_name+"(SAMPLE)", - location="data_set", + remote_src=True, volume=volume_1 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get('changed') is True + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds 
= job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -689,14 +923,51 @@ def test_job_submit_pds_5_sec_job_wait_15(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="data_set", wait_time_s=wait_time_s) + remote_src=True, wait_time=wait_time_s) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get('changed') is True - assert result.get('duration') <= wait_time_s - assert result.get('execution_time') is not None + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") <= wait_time_s + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is 
not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -724,18 +995,56 @@ def test_job_submit_pds_30_sec_job_wait_60(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="data_set", wait_time_s=wait_time_s) + remote_src=True, wait_time=wait_time_s) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 - assert result.get('changed') is True - assert result.get('duration') <= wait_time_s - assert result.get('execution_time') is not None + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") <= wait_time_s + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None 
+ assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") + def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): """This submits a 30 second job and only waits 10 seconds""" try: @@ -759,15 +1068,50 @@ def test_job_submit_pds_30_sec_job_wait_10_negative(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=data_set_name+"(BPXSLEEP)", - location="data_set", wait_time_s=wait_time_s) + remote_src=True, wait_time=wait_time_s) for result in results.contacted.values(): + assert result.get("changed") is False assert result.get("msg") is not None - assert result.get('changed') is False - assert result.get('duration') >= wait_time_s - # expecting at least "long running job that exceeded its maximum wait" + assert result.get("failed") is True assert re.search(r'exceeded', repr(result.get("msg"))) - assert result.get('execution_time') is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + 
+ assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") >= wait_time_s + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is not None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") == 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") == 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "AC" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=data_set_name, state="absent") @@ -797,22 +1141,23 @@ def test_job_submit_max_rc(ansible_zos_module, args): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="local", + remote_src=False, max_rc=args["max_rc"], - wait_time_s=args["wait_time_s"] + wait_time=args["wait_time_s"] ) for result in results.contacted.values(): # Should fail normally as a non-zero RC will result in job submit failure if args["max_rc"] is None: + assert result.get("changed") is False assert result.get("msg") is not None - assert result.get('changed') is False + assert result.get("failed") is True # On busy systems, it is possible that the 
duration even for a job with a non-zero return code # will take considerable time to obtain the job log and thus you could see either error msg below #Expecting: - "The job return code 8 was non-zero in the job output, this job has failed" # - Consider using module zos_job_query to poll for a long running job or # increase option \\'wait_times_s` to a value greater than 10.", - duration = result.get('duration') + duration = result.get("jobs")[0].get('duration') if duration >= args["wait_time_s"]: re.search(r'long running job', repr(result.get("msg"))) @@ -821,13 +1166,14 @@ def test_job_submit_max_rc(ansible_zos_module, args): # Should fail with normally as well, job fails with an RC 8 yet max is set to 4 elif args["max_rc"] == 4: - assert result.get("msg") is not None - assert result.get('changed') is False # Expecting "The job return code, # 'ret_code[code]' 8 for the submitted job is greater # than the value set for option 'max_rc' 4. # Increase the value for 'max_rc' otherwise # this job submission has failed. 
+ assert result.get("changed") is False + assert result.get("msg") is not None + assert result.get("failed") is True assert re.search( r'the submitted job is greater than the value set for option', repr(result.get("msg")) @@ -837,9 +1183,48 @@ def test_job_submit_max_rc(ansible_zos_module, args): # Will not fail and as the max_rc is set to 12 and the rc is 8 is a change true # there are other possibilities like an ABEND or JCL ERROR will fail this even # with a MAX RC - assert result.get("msg") is None - assert result.get('changed') is True - assert result.get("jobs")[0].get("ret_code").get("code") < 12 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") 
== "CC" + assert rc.get("code") < 12 + assert rc.get("msg_code") != "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=tmp_file.name, state="absent") @@ -909,15 +1294,54 @@ def test_job_submit_jinja_template(ansible_zos_module, args): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="local", + remote_src=False, use_template=True, template_parameters=args["options"] ) for result in results.contacted.values(): - assert result.get('changed') is True - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert 
rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: os.remove(tmp_file.name) @@ -933,16 +1357,55 @@ def test_job_submit_full_input(ansible_zos_module): ) results = hosts.all.zos_job_submit( src=f"{temp_path}/SAMPLE", - location="uss", + remote_src=True, volume=None, # This job used to set wait=True, but since it has been deprecated # and removed, it now waits up to 30 seconds. - wait_time_s=30 + wait_time=30 ) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is 
not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -952,11 +1415,49 @@ def test_negative_job_submit_local_jcl_no_dsn(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_NO_DSN) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time_s=20, location="local") + results = hosts.all.zos_job_submit(src=tmp_file.name, wait_time=20, remote_src=False) for result in results.contacted.values(): assert result.get("changed") is False + assert result.get("msg") is not None assert re.search(r'completion code', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("failed") is True + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert 
dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "JCLERR" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): @@ -964,16 +1465,53 @@ def test_negative_job_submit_local_jcl_invalid_user(ansible_zos_module): with open(tmp_file.name, "w",encoding="utf-8") as f: f.write(JCL_FILE_CONTENTS_INVALID_USER) hosts = ansible_zos_module - results = hosts.all.zos_job_submit(src=tmp_file.name, location="local") + results = hosts.all.zos_job_submit(src=tmp_file.name, remote_src=False) for result in results.contacted.values(): assert result.get("changed") is False assert re.search(r'please review the error for further details', repr(result.get("msg"))) assert re.search(r'please review the job log for status SEC', repr(result.get("msg"))) - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("failed") is True + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 
0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "SEC" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None assert re.search( r'please review the job log for status SEC', - repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) @@ -983,22 +1521,55 @@ def test_job_submit_local_jcl_typrun_scan(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_SCAN) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" },) for result in results.contacted.values(): assert result.get("changed") is False - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert 
dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "TYPRUN=SCAN" + assert rc.get("code") is None + assert rc.get("msg_code") is None + assert rc.get("msg_txt") is not None assert re.search( r'run with special job processing TYPRUN=SCAN', - repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) - assert result.get("jobs")[0].get("ret_code").get("code") is None - assert result.get("jobs")[0].get("ret_code").get("msg") == "TYPRUN=SCAN" - assert result.get("jobs")[0].get("ret_code").get("msg_code") is None def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): @@ -1007,8 +1578,8 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_COPY) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" @@ -1019,10 +1590,45 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): # When running a job with TYPRUN=COPY, a copy of the JCL will be kept in the JES spool, so # effectively, the system is changed even though the job didn't run. 
assert result.get("changed") is True - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "TYPRUN=COPY" + assert rc.get("code") is None + assert rc.get("msg_code") is None assert re.search( r'The job was run with TYPRUN=COPY.', - repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) + repr(rc.get("msg_txt")) ) assert result.get("jobs")[0].get("ret_code").get("code") is None assert result.get("jobs")[0].get("ret_code").get("msg") == 'TYPRUN=COPY' @@ -1035,15 +1641,40 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_HOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": 
"IBM-1047" },) for result in results.contacted.values(): + print(result) assert result.get("changed") is False - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is None + assert job.get("job_class") is not None + assert job.get("svc_class") is not None + assert job.get("system") is None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + assert len(job.get("dds")) == 0 + + rc = job.get("ret_code") assert re.search( r'The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) @@ -1059,15 +1690,40 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): f.write(JCL_FILE_CONTENTS_TYPRUN_JCLHOLD) hosts = ansible_zos_module results = hosts.all.zos_job_submit(src=tmp_file.name, - location="local", - wait_time_s=20, + remote_src=False, + wait_time=20, encoding={ "from": "UTF-8", "to": "IBM-1047" },) for result in results.contacted.values(): + print(result) assert result.get("changed") is False - assert result.get("jobs")[0].get("job_id") is not None + assert result.get("jobs") is not None + job = result.get("jobs")[0] + + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is 
None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is None + assert len(job.get("steps")) == 0 + assert len(job.get("dds")) == 0 + + rc = job.get("ret_code") assert re.search( r'The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) @@ -1099,11 +1755,50 @@ def test_job_from_gdg_source(ansible_zos_module, generation): cmd="dcp '{0}/SAMPLE' '{1}'".format(temp_path, gds_name) ) - results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + results = hosts.all.zos_job_submit(src=gds_name, remote_src=True) for result in results.contacted.values(): - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert 
job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") hosts.all.zos_data_set(name=f"{source}(0)", state="absent") @@ -1122,10 +1817,12 @@ def test_inexistent_negative_gds(ansible_zos_module): # Only creating generation 0. hosts.all.zos_data_set(name=f"{source}(+1)", state="present", type="seq") - results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + results = hosts.all.zos_job_submit(src=gds_name, remote_src=True) for result in results.contacted.values(): assert result.get("changed") is False - assert "was not found" in result.get("msg") + assert result.get("msg") is not None + assert result.get("failed") is True + assert re.search(r'was not found', repr(result.get("msg"))) finally: hosts.all.zos_data_set(name=f"{source}(0)", state="absent") hosts.all.zos_data_set(name=source, state="absent") @@ -1142,10 +1839,12 @@ def test_inexistent_positive_gds(ansible_zos_module): # Only creating generation 0. 
hosts.all.zos_data_set(name=gds_name, state="present", type="seq") - results = hosts.all.zos_job_submit(src=gds_name, location="data_set") + results = hosts.all.zos_job_submit(src=gds_name, remote_src=True) for result in results.contacted.values(): assert result.get("changed") is False - assert "was not found" in result.get("msg") + assert result.get("msg") is not None + assert result.get("failed") is True + assert re.search(r'was not found', repr(result.get("msg"))) finally: hosts.all.zos_data_set(name=f"{source}(0)", state="absent") hosts.all.zos_data_set(name=source, state="absent") @@ -1173,16 +1872,55 @@ def test_zoau_bugfix_invalid_utf8_chars(ansible_zos_module): results = hosts.all.zos_job_submit( src=tmp_file.name, - location="local", - wait_time_s=15 + remote_src=False, + wait_time=15 ) for result in results.contacted.values(): # We shouldn't get an error now that ZOAU handles invalid/unprintable # UTF-8 chars correctly. - assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000" - assert result.get("jobs")[0].get("ret_code").get("code") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("jobs") is not None + + job = result.get("jobs")[0] + assert job.get("job_id") is not None + assert job.get("job_name") is not None + assert job.get("content_type") is not None + assert job.get("duration") is not None + assert job.get("execution_time") is not None + assert job.get("job_class") is not None + assert job.get("svc_class") is None + assert job.get("system") is not None + assert job.get("subsystem") is not None + assert job.get("origin_node") is not None + assert job.get("cpu_time") is not None + assert job.get("execution_node") is not None + assert job.get("priority") is not None + assert job.get("asid") is not None + assert job.get("creation_date") is not None + assert job.get("creation_time") is not None + assert job.get("queue_position") is not None + assert job.get("program_name") is not 
None + + dds = job.get("dds")[0] + assert dds.get("dd_name") is not None + assert dds.get("record_count") != 0 + assert dds.get("id") is not None + assert dds.get("stepname") is not None + assert dds.get("procstep") is not None + assert dds.get("byte_count") != 0 + assert dds.get("content") is not None + + step = job.get("steps")[0] + assert step.get("step_name") is not None + assert step.get("step_cc") is not None + + rc = job.get("ret_code") + assert rc.get("msg") == "CC" + assert rc.get("code") == 0 + assert rc.get("msg_code") == "0000" + assert rc.get("msg_txt") == "CC" finally: hosts.all.file(path=temp_path, state="absent") @@ -1215,7 +1953,7 @@ def test_job_submit_async(get_config): cut_python_path, python_version, tmp_file.name - )), + )), playbook.name )) @@ -1225,7 +1963,7 @@ def test_job_submit_async(get_config): ssh_key, user, python_path - )), + )), inventory.name )) @@ -1245,5 +1983,8 @@ def test_job_submit_async(get_config): assert result.returncode == 0 assert "ok=2" in result.stdout assert "changed=2" in result.stdout + # Commenting this assertion as this will cause a failure when a warning is displayed + # e.g. [WARNING]: Using force uses operations that are subject to race conditions and ... + # Which is a normal warning coming from zos_copy operation. 
assert result.stderr == "" diff --git a/tests/functional/modules/test_zos_lineinfile_func.py b/tests/functional/modules/test_zos_lineinfile_func.py index 5fe546264d..3e709a99ff 100644 --- a/tests/functional/modules/test_zos_lineinfile_func.py +++ b/tests/functional/modules/test_zos_lineinfile_func.py @@ -27,6 +27,17 @@ __metaclass__ = type +expected_keys = [ + 'changed', + 'cmd', + 'found', + 'stdout', + 'stdout_lines', + 'stderr', + 'stderr_lines', + 'rc' +] + c_pgm="""#include #include #include @@ -264,10 +275,10 @@ def remove_uss_environment(ansible_zos_module, file): def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set(name=ds_name, type=ds_type) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + hosts.all.shell(cmd=f"decho '' '{ds_full_name}'") cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name @@ -278,7 +289,7 @@ def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) def remove_ds_environment(ansible_zos_module, ds_name): hosts = ansible_zos_module - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") # supported data set types ds_type = ['seq', 'pds', 'pdse'] @@ -310,7 +321,7 @@ def test_uss_line_replace(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE @@ -334,7 +345,7 @@ def test_uss_line_insertafter_regex(ansible_zos_module): results = 
hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -358,7 +369,7 @@ def test_uss_line_insertbefore_regex(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -382,7 +393,7 @@ def test_uss_line_insertafter_eof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -406,7 +417,7 @@ def test_uss_line_insertbefore_bof(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -431,7 +442,7 @@ def test_uss_line_replace_match_insertafter_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in 
expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE @@ -456,7 +467,7 @@ def test_uss_line_replace_match_insertbefore_ignore(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE @@ -481,7 +492,7 @@ def test_uss_line_replace_nomatch_insertafter_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER @@ -506,7 +517,7 @@ def test_uss_line_replace_nomatch_insertbefore_match(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE @@ -531,7 +542,7 @@ def test_uss_line_replace_nomatch_insertafter_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for 
result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTAFTER_NOMATCH @@ -556,7 +567,7 @@ def test_uss_line_replace_nomatch_insertbefore_nomatch(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_NOMATCH_INSERTBEFORE_NOMATCH @@ -581,7 +592,7 @@ def test_uss_line_absent(ansible_zos_module): for result in results.contacted.values(): print(result) assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -604,7 +615,7 @@ def test_uss_advanced_regular_expression_absent(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -629,7 +640,7 @@ def test_uss_line_replace_quoted_escaped(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED @@ -654,7 +665,7 @@ def test_uss_line_replace_quoted_not_escaped(ansible_zos_module): results = 
hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_QUOTED @@ -677,7 +688,7 @@ def test_uss_line_does_not_insert_repeated(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -716,7 +727,7 @@ def test_ds_line_insertafter_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX @@ -746,7 +757,7 @@ def test_ds_line_insert_before_ansible_block(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_TEST_PARSING_CONTENT @@ -772,7 +783,7 @@ def test_ds_line_insertbefore_regex(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert 
all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX @@ -799,7 +810,7 @@ def test_ds_line_insertafter_eof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF @@ -825,7 +836,7 @@ def test_ds_line_insertbefore_bof(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF @@ -853,7 +864,7 @@ def test_ds_line_replace_match_insertafter_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER_IGNORE @@ -881,7 +892,7 @@ def test_ds_line_replace_match_insertbefore_ignore(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" 
".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE_IGNORE @@ -904,7 +915,7 @@ def test_gds_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) cmd = result.get("cmd").split() for cmd_p in cmd: if ds_name in cmd_p: @@ -917,7 +928,7 @@ def test_gds_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) cmd = result.get("cmd").split() for cmd_p in cmd: if ds_name in cmd_p: @@ -932,7 +943,7 @@ def test_gds_ds_insert_line(ansible_zos_module): for result in results.contacted.values(): assert result.get("changed") == 1 assert result.get("rc") == 0 - assert "return_content" in result + assert all(key in result for key in expected_keys) backup = ds_name + "(0)" results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) for result in results.contacted.values(): @@ -942,7 +953,7 @@ def test_gds_ds_insert_line(ansible_zos_module): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) finally: hosts.all.shell(cmd="""drm "{0}*" """.format(ds_name)) @@ -955,13 +966,13 @@ def test_special_characters_ds_insert_line(ansible_zos_module): backup = get_tmp_ds_name(6, 6, symbols=True) try: # Set environment - result = hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + hosts.all.shell(cmd=f"dtouch -tseq '{ds_name}'") params["src"] = ds_name results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 
1 - assert "return_content" in result + assert all(key in result for key in expected_keys) src = ds_name.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): @@ -974,7 +985,7 @@ def test_special_characters_ds_insert_line(ansible_zos_module): print(result) assert result.get("changed") == 1 assert result.get("rc") == 0 - assert "return_content" in result + assert all(key in result for key in expected_keys) backup = backup.replace('$', "\$") results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) for result in results.contacted.values(): @@ -1100,7 +1111,7 @@ def test_ds_line_absent(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT @@ -1128,7 +1139,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): try: ds_full_name = get_tmp_ds_name() temp_file = get_random_file_name(dir=TMP_DIRECTORY) - hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_full_name}'") hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " hosts.all.shell(cmd=cmd_str) @@ -1139,11 +1150,11 @@ def test_ds_tmp_hlq_option(ansible_zos_module): params["path"] = ds_full_name results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): - assert "return_content" in result + assert all(key in result for key in expected_keys) for key in kwargs: assert kwargs.get(key) in result.get(key) finally: - hosts.all.zos_data_set(name=ds_full_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_full_name}'") ## Non supported test cases @@ -1160,7 
+1171,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): } try: ds_name = get_tmp_ds_name() + "." + ds_type - results = hosts.all.zos_data_set(name=ds_name, type=ds_type, replace='yes') + results = hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") for result in results.contacted.values(): assert result.get("changed") is True params["path"] = ds_name @@ -1169,7 +1180,7 @@ def test_ds_not_supported(ansible_zos_module, dstype): assert result.get("changed") is False assert result.get("msg") == "VSAM data set type is NOT supported" finally: - hosts.all.zos_data_set(name=ds_name, state="absent") + hosts.all.shell(cmd=f"drm '{ds_name}'") @pytest.mark.ds @@ -1194,22 +1205,12 @@ def test_ds_line_force(ansible_zos_module, dstype): params["path"] = f"{default_data_set_name}({member_2})" try: # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { "name": f"{default_data_set_name}({member_1})", - "type": "member", "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, - ] - ) - # write memeber to verify cases + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") + # write member to verify cases if ds_type in ["pds", "pdse"]: cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) else: @@ -1242,7 +1243,7 @@ def test_ds_line_force(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm 
'{default_data_set_name}*'") @pytest.mark.ds @@ -1264,21 +1265,11 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): content = TEST_CONTENT try: # set up: - hosts.all.zos_data_set( - name=default_data_set_name, - state="present", - type=ds_type, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{default_data_set_name}'") hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_data_set( - batch=[ - { "name": f"{default_data_set_name}({member_1})", - "type": "member", "state": "present", "replace": True, }, - { "name": params["path"], "type": "member", - "state": "present", "replace": True, }, - ] - ) + # Create two empty members + hosts.all.shell(cmd=f"decho '' '{default_data_set_name}({member_1})'") + hosts.all.shell(cmd=f"decho '' '{params['path']}'") cmd_str = "cp -CM {0} \"//'{1}'\"".format(quote(temp_file), params["path"]) hosts.all.shell(cmd=cmd_str) results = hosts.all.shell(cmd="cat \"//'{0}'\" | wc -l ".format(params["path"])) @@ -1305,7 +1296,7 @@ def test_ds_line_force_fail(ansible_zos_module, dstype): pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") hosts.all.shell(cmd='rm -r {0}'.format(path)) - hosts.all.zos_data_set(name=default_data_set_name, state="absent") + hosts.all.shell(cmd=f"drm '{default_data_set_name}*'") @pytest.mark.ds @@ -1326,6 +1317,7 @@ def test_ds_line_does_not_insert_repeated(ansible_zos_module, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -1368,7 +1360,7 @@ def test_uss_encoding(ansible_zos_module, encoding): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") 
== 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) results = hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {encoding} {full_path}") for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING @@ -1399,10 +1391,10 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): try: hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") hosts.all.shell(cmd=f"iconv -f IBM-1047 -t {params['encoding']} temp_file > temp_file ") - hosts.all.zos_data_set(name=ds_name, type=ds_type) + hosts.all.shell(cmd=f"dtouch -t{ds_type} '{ds_name}'") if ds_type in ["pds", "pdse"]: ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") + hosts.all.shell(cmd=f"decho '' '{ds_full_name}'") cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" else: ds_full_name = ds_name @@ -1413,7 +1405,7 @@ def test_ds_encoding(ansible_zos_module, encoding, dstype): results = hosts.all.zos_lineinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert "return_content" in result + assert all(key in result for key in expected_keys) hosts.all.shell( cmd=f"iconv -f {encoding} -t IBM-1047 \"{ds_full_name}\" > \"{ds_full_name}\" " ) diff --git a/tests/functional/modules/test_zos_mount_func.py b/tests/functional/modules/test_zos_mount_func.py index 532c2f213e..6b9ba4e338 100644 --- a/tests/functional/modules/test_zos_mount_func.py +++ b/tests/functional/modules/test_zos_mount_func.py @@ -187,7 +187,7 @@ def test_remount(ansible_zos_module, volumes_on_systems): hosts.all.file(path="/pythonx/", state="absent") -def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_systems): +def test_basic_mount_with_bpx_nomarker_nobackup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() @@ -198,7 +198,7 @@ def 
test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ hosts.all.zos_copy( content=INITIAL_PRM_MEMBER, dest=tmp_file_filename, - is_binary=True, + binary=True, ) hosts.all.shell( cmd="chtag -t -c ISO8859-1 " + tmp_file_filename, @@ -209,19 +209,12 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ dest = get_tmp_ds_name() dest_path = dest + "(AUTO1)" - hosts.all.zos_data_set( - name=dest, - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"dtouch -tpdse -s5M -IFBA -l80 {dest}") print("\nbnn-Copying {0} to {1}\n".format(tmp_file_filename, dest_path)) hosts.all.zos_copy( src=tmp_file_filename, dest=dest_path, - is_binary=True, + binary=True, remote_src=True, ) @@ -231,7 +224,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ path="/pythonx", fs_type="zfs", state="mounted", - persistent=dict(data_store=dest_path), + persistent=dict(name=dest_path), ) for result in mount_result.values(): @@ -252,15 +245,7 @@ def test_basic_mount_with_bpx_nocomment_nobackup(ansible_zos_module, volumes_on_ ) hosts.all.file(path=tmp_file_filename, state="absent") hosts.all.file(path="/pythonx/", state="absent") - hosts.all.zos_data_set( - name=dest, - state="absent", - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"drm {dest}") def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module @@ -286,7 +271,7 @@ def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on hosts.all.zos_copy( src=tmp_file_filename, dest=dest_path, - is_binary=True, + binary=True, remote_src=True, ) @@ -296,7 +281,7 @@ def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on path="/pythonx", fs_type="zfs", state="mounted", - persistent=dict(data_store=dest_path), + 
persistent=dict(name=dest_path), ) for result in mount_result.values(): @@ -330,7 +315,7 @@ def test_basic_mount_with_bpx_no_utf_8_characters(ansible_zos_module, volumes_on stdin="", ) -def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_systems): +def test_basic_mount_with_bpx_marker_backup(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module volumes = Volume_Handler(volumes_on_systems) volume_1 = volumes.get_available_vol() @@ -341,7 +326,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst hosts.all.zos_copy( content=INITIAL_PRM_MEMBER, dest=tmp_file_filename, - is_binary=True, + binary=True, ) # Make it readable at console hosts.all.shell( @@ -365,20 +350,13 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst dest_path = dest + "(AUTO2)" back_dest_path = dest + "(AUTO2BAK)" - hosts.all.zos_data_set( - name=dest, - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"dtouch -tpdse -s5M -IFBA -l80 {dest}") print("\nbcb-Copying {0} to {1}\n".format(tmp_file_filename, dest_path)) hosts.all.zos_copy( src=tmp_file_filename, dest=dest_path, - is_binary=True, + binary=True, remote_src=True, ) @@ -391,10 +369,10 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst fs_type="zfs", state="mounted", persistent=dict( - data_store=dest_path, + name=dest_path, backup="Yes", backup_name=back_dest_path, - comment=["bpxtablecomment - try this", "second line of comment"], + marker=["bpxtablemarker - try this", "second line of marker"], ), ) # copying from dataset to make editable copy on target @@ -403,7 +381,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst hosts.all.zos_copy( src=dest_path, dest=test_tmp_file_filename, - is_binary=True, + binary=True, remote_src=True, ) results = hosts.all.shell( @@ -421,7 +399,7 @@ def 
test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst assert result.get("changed") is True assert srcfn in data - assert "bpxtablecomment - try this" in data + assert "bpxtablemarker - try this" in data finally: hosts.all.zos_mount( src=srcfn, @@ -438,15 +416,7 @@ def test_basic_mount_with_bpx_comment_backup(ansible_zos_module, volumes_on_syst hosts.all.file(path=tmp_file_filename, state="absent") hosts.all.file(path=test_tmp_file_filename, state="absent") hosts.all.file(path="/pythonx/", state="absent") - hosts.all.zos_data_set( - name=dest, - state="absent", - type="pdse", - space_primary=5, - space_type="m", - record_format="fba", - record_length=80, - ) + hosts.all.shell(cmd=f"drm {dest}") def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems): hosts = ansible_zos_module @@ -464,14 +434,14 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) finally: tmphlq = "TMPHLQ" persist_data_set = get_tmp_ds_name() - hosts.all.zos_data_set(name=persist_data_set, state="present", type="seq") + hosts.all.shell(cmd=f"dtouch -tseq {persist_data_set}") unmount_result = hosts.all.zos_mount( src=srcfn, path="/pythonx", fs_type="zfs", state="absent", tmp_hlq=tmphlq, - persistent=dict(data_store=persist_data_set, backup=True) + persistent=dict(name=persist_data_set, backup=True) ) hosts.all.shell( cmd="drm " + DataSet.escape_data_set_name(srcfn), @@ -479,7 +449,7 @@ def test_basic_mount_with_tmp_hlq_option(ansible_zos_module, volumes_on_systems) stdin="", ) - hosts.all.zos_data_set(name=persist_data_set, state="absent") + hosts.all.shell(cmd=f"drm {persist_data_set}") for result in unmount_result.values(): assert result.get("rc") == 0 assert result.get("stdout") != "" diff --git a/tests/functional/modules/test_zos_operator_action_query_func.py b/tests/functional/modules/test_zos_operator_action_query_func.py index f8f521a286..a9de1c9074 100644 --- 
a/tests/functional/modules/test_zos_operator_action_query_func.py +++ b/tests/functional/modules/test_zos_operator_action_query_func.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2024 +# Copyright (c) IBM Corporation 2019, 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -24,7 +24,7 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): results = hosts.all.zos_operator_action_query() try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: @@ -32,14 +32,16 @@ def test_zos_operator_action_query_no_options(ansible_zos_module): for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_option_message_id(ansible_zos_module): +def test_zos_operator_action_query_option_msg_id(ansible_zos_module): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") - results = hosts.all.zos_operator_action_query(message_id="IEE094D") + results = hosts.all.zos_operator_action_query(msg_id="IEE094D") try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: @@ -47,40 +49,46 @@ def test_zos_operator_action_query_option_message_id(ansible_zos_module): for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None -def 
test_zos_operator_action_query_option_message_id_invalid_abbreviation( +def test_zos_operator_action_query_option_msg_id_invalid_abbreviation( ansible_zos_module ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") - results = hosts.all.zos_operator_action_query(message_id="IEE") + results = hosts.all.zos_operator_action_query(msg_id="IEE") try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): - assert not result.get("actions") + assert result.get("actions") is not None + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "*"]) -def test_zos_operator_action_query_option_message_id_regex( +@pytest.mark.parametrize("msg_id", ["IEE*", "*"]) +def test_zos_operator_action_query_option_msg_id_regex( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") - results = hosts.all.zos_operator_action_query(message_id=message_id) + results = hosts.all.zos_operator_action_query(msg_id=msg_id) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None def test_zos_operator_action_query_option_system(ansible_zos_module): hosts = ansible_zos_module @@ -91,6 +99,8 @@ def test_zos_operator_action_query_option_system(ansible_zos_module): results = 
hosts.all.zos_operator_action_query(system=system_name) for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None def test_zos_operator_action_query_option_system_invalid_abbreviation( ansible_zos_module @@ -102,12 +112,14 @@ def test_zos_operator_action_query_option_system_invalid_abbreviation( system_name = result.get("stdout", "").strip() results = hosts.all.zos_operator_action_query(system=system_name[:-1]) for result in results.contacted.values(): - assert not result.get("actions") + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "IEE094D", "*"]) -def test_zos_operator_action_query_option_system_and_message_id( +@pytest.mark.parametrize("msg_id", ["IEE*", "IEE094D", "*"]) +def test_zos_operator_action_query_option_system_and_msg_id( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module sysinfo = hosts.all.shell(cmd="uname -n") @@ -115,7 +127,7 @@ def test_zos_operator_action_query_option_system_and_message_id( for result in sysinfo.contacted.values(): system_name = result.get("stdout", "").strip() results = hosts.all.zos_operator_action_query( - system=system_name, message_id=message_id + system=system_name, msg_id=msg_id ) for result in results.contacted.values(): assert result.get("actions") @@ -131,18 +143,20 @@ def test_zos_operator_action_query_option_system_regex(ansible_zos_module): system=system_name[:3] + "*") try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is 
not None -@pytest.mark.parametrize("message_id", ["IEE*", "IEE094D", "*"]) -def test_zos_operator_action_query_option_system_regex_and_message_id( +@pytest.mark.parametrize("msg_id", ["IEE*", "IEE094D", "*"]) +def test_zos_operator_action_query_option_system_regex_and_msg_id( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") @@ -151,17 +165,19 @@ def test_zos_operator_action_query_option_system_regex_and_message_id( for result in sysinfo.contacted.values(): system_name = result.get("stdout", " ").strip() results = hosts.all.zos_operator_action_query( - system=system_name[:3] + "*", message_id=message_id + system=system_name[:3] + "*", msg_id=msg_id ) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize("system", ["", "OVER8CHARS", "--BADNM", "invalid-system"]) def test_zos_operator_action_query_invalid_option_system( @@ -171,31 +187,37 @@ def test_zos_operator_action_query_invalid_option_system( hosts = ansible_zos_module results = hosts.all.zos_operator_action_query(system=system) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["IEE*", "IEE094D", "*"]) -def test_zos_operator_action_query_valid_message_id_invalid_option_system( +@pytest.mark.parametrize("msg_id", ["IEE*", "IEE094D", "*"]) +def test_zos_operator_action_query_valid_msg_id_invalid_option_system( ansible_zos_module, - message_id + msg_id ): hosts 
= ansible_zos_module results = hosts.all.zos_operator_action_query( - system="invalid-system", message_id=message_id + system="invalid-system", msg_id=msg_id ) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -@pytest.mark.parametrize("message_id", ["", "--BADNM", "invalid-message"]) -def test_zos_operator_action_query_invalid_option_message_id( +@pytest.mark.parametrize("msg_id", ["", "--BADNM", "invalid-message"]) +def test_zos_operator_action_query_invalid_option_msg_id( ansible_zos_module, - message_id + msg_id ): hosts = ansible_zos_module - results = hosts.all.zos_operator_action_query(message_id=message_id) + results = hosts.all.zos_operator_action_query(msg_id=msg_id) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_valid_option_system_invalid_option_message_id( +def test_zos_operator_action_query_valid_option_system_invalid_option_msg_id( ansible_zos_module ): hosts = ansible_zos_module @@ -204,66 +226,72 @@ def test_zos_operator_action_query_valid_option_system_invalid_option_message_id for result in sysinfo.contacted.values(): system_name = result.get("stdout", "").strip() results = hosts.all.zos_operator_action_query( - system=system_name, message_id="invalid-message" + system=system_name, msg_id="invalid-message" ) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None def test_zos_operator_action_query_invalid_option_job_name(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator_action_query( job_name="invalid-job-name") for result in 
results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize( - "message_filter", + "msg_filter", [ {"filter": "DUMP"}, - {"filter": "DUMP", "use_regex": False}, - {"filter": "^.*DUMP.*$", "use_regex": True}, - {"filter": "^.*OPERAND\\(S\\).*$", "use_regex": True} + {"filter": "DUMP", "literal": True}, + {"filter": "^.*DUMP.*$", "literal": False}, + {"filter": "^.*OPERAND\\(S\\).*$", "literal": False} ] ) -def test_zos_operator_action_query_option_message_filter_one_match( +def test_zos_operator_action_query_option_msg_filter_one_match( ansible_zos_module, - message_filter + msg_filter ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query( - message_filter=message_filter) + msg_filter=msg_filter) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): assert result.get("actions") + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize( - "message_filter", + "msg_filter", [ {"filter": "DUMP"}, - {"filter": "DUMP", "use_regex": False}, - {"filter": "^.*DUMP.*$", "use_regex": True}, - {"filter": "^.*OPERAND\\(S\\).*$", "use_regex": True} + {"filter": "DUMP", "literal": True}, + {"filter": "^.*DUMP.*$", "literal": False}, + {"filter": "^.*OPERAND\\(S\\).*$", "literal": False} ] ) -def test_zos_operator_action_query_option_message_filter_multiple_matches( +def test_zos_operator_action_query_option_msg_filter_multiple_matches( ansible_zos_module, - message_filter + msg_filter ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP 
COMM=('test dump')") hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query( - message_filter=message_filter) + msg_filter=msg_filter) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: @@ -271,38 +299,44 @@ def test_zos_operator_action_query_option_message_filter_multiple_matches( for result in results.contacted.values(): assert result.get("actions") assert len(result.get("actions")) > 1 + assert result.get("count") is not None + assert result.get("changed") is not None @pytest.mark.parametrize( - "message_filter", + "msg_filter", [ {"filter": "IMS"}, - {"filter": "IMS", "use_regex": False}, - {"filter": "^.*IMS.*$", "use_regex": True}, + {"filter": "IMS", "literal": True}, + {"filter": "^.*IMS.*$", "literal": False}, ] ) -def test_zos_operator_action_query_option_message_filter_no_match( +def test_zos_operator_action_query_option_msg_filter_no_match( ansible_zos_module, - message_filter + msg_filter ): hosts = ansible_zos_module hosts.all.zos_operator(cmd="DUMP COMM=('test dump')") results = hosts.all.zos_operator_action_query( - message_filter=message_filter) + msg_filter=msg_filter) try: for action in results.get("actions"): - if "SPECIFY OPERAND(S) FOR DUMP" in action.get("message_text", ""): + if "SPECIFY OPERAND(S) FOR DUMP" in action.get("msg_txt", ""): hosts.all.zos_operator( cmd="{0}cancel".format(action.get("number"))) except Exception: pass for result in results.contacted.values(): - assert not result.get("actions") + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None -def test_zos_operator_action_query_invalid_option_message_filter( +def test_zos_operator_action_query_invalid_option_msg_filter( ansible_zos_module ): hosts = 
ansible_zos_module results = hosts.all.zos_operator_action_query( - message_filter={"filter": "*DUMP", "use_regex": True}) + msg_filter={"filter": "*DUMP", "literal": False}) for result in results.contacted.values(): - assert result.get("actions") is None + assert result.get("actions") == [] + assert result.get("count") is not None + assert result.get("changed") is not None diff --git a/tests/functional/modules/test_zos_operator_func.py b/tests/functional/modules/test_zos_operator_func.py index a2924f7bb9..bd5d871810 100644 --- a/tests/functional/modules/test_zos_operator_func.py +++ b/tests/functional/modules/test_zos_operator_func.py @@ -45,7 +45,7 @@ - name: zos_operator zos_operator: cmd: 'd a,all' - wait_time_s: 3 + wait_time: 3 verbose: true register: output @@ -112,15 +112,29 @@ def test_zos_operator_various_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd=command) for result in results.contacted.values(): - assert result["rc"] == expected_rc + print(result) + assert result.get("rc") == expected_rc assert result.get("changed") is changed + assert result.get("msg", False) is False + assert result.get("cmd") == command + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None def test_zos_operator_invalid_command(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="invalid,command", verbose=False) for result in results.contacted.values(): + print(result) assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None def test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module): @@ -128,6 +142,12 @@ def 
test_zos_operator_invalid_command_to_ensure_transparency(ansible_zos_module) results = hosts.all.zos_operator(cmd="DUMP COMM=('ERROR DUMP')", verbose=False) for result in results.contacted.values(): assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None transparency = False if any('DUMP COMMAND' in str for str in result.get("content")): transparency = True @@ -139,8 +159,13 @@ def test_zos_operator_positive_path(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="d u,all", verbose=False) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None @@ -148,8 +173,13 @@ def test_zos_operator_positive_path_verbose(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_operator(cmd="d u,all", verbose=True) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") is not None + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None # Traverse the content list for a known verbose keyword and track state is_verbose = False @@ -163,45 +193,55 @@ def test_zos_operator_positive_verbose_with_full_delay(ansible_zos_module): hosts = ansible_zos_module wait_time = 10 results = hosts.all.zos_operator( - cmd="RO *ALL,LOG 'dummy syslog 
message'", verbose=True, wait_time_s=wait_time + cmd="RO *ALL,LOG 'dummy syslog message'", verbose=True, wait_time=wait_time ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True - assert result.get("content") is not None + assert result.get("msg", False) is False + assert result.get("cmd") is not None assert result.get("elapsed") > wait_time + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None def test_zos_operator_positive_verbose_with_quick_delay(ansible_zos_module): hosts = ansible_zos_module - wait_time_s=10 + wait_time=10 results = hosts.all.zos_operator( - cmd="d u,all", verbose=True, wait_time_s=wait_time_s + cmd="d u,all", verbose=True, wait_time=wait_time ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") <= (2 * wait_time) + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None - # Account for slower network - assert result.get('elapsed') <= (2 * wait_time_s) def test_zos_operator_positive_verbose_blocking(ansible_zos_module): hosts = ansible_zos_module if is_zoau_version_higher_than(hosts,"1.2.4.5"): - wait_time_s=5 + wait_time=5 results = hosts.all.zos_operator( - cmd="d u,all", verbose=True, wait_time_s=wait_time_s + cmd="d u,all", verbose=True, wait_time=wait_time ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("elapsed") >= wait_time + assert result.get("wait_time") is not None + assert result.get("time_unit") == "s" assert 
result.get("content") is not None - # Account for slower network - assert result.get('elapsed') >= wait_time_s def test_zos_operator_positive_path_preserve_case(ansible_zos_module): @@ -214,8 +254,13 @@ def test_zos_operator_positive_path_preserve_case(ansible_zos_module): ) for result in results.contacted.values(): - assert result["rc"] == 0 + assert result.get("rc") == 0 assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("wait_time") is not None + assert result.get("elapsed") is not None + assert result.get("time_unit") == "s" assert result.get("content") is not None # Making sure the output from opercmd logged the command # exactly as it was written. @@ -229,7 +274,15 @@ def test_response_come_back_complete(ansible_zos_module): res = {} res["stdout"] = [] for result in results.contacted.values(): + assert result.get("rc") == 0 + assert result.get("changed") is True + assert result.get("msg", False) is False + assert result.get("cmd") is not None + assert result.get("wait_time") is not None + assert result.get("elapsed") is not None + assert result.get("time_unit") == "s" + assert result.get("content") is not None stdout = result.get('content') # HASP646 Only appears in the last line that before did not appears last_line = len(stdout) - assert "HASP646" in stdout[last_line - 1] \ No newline at end of file + assert "HASP646" in stdout[last_line - 1] diff --git a/tests/functional/modules/test_zos_started_task_func.py b/tests/functional/modules/test_zos_started_task_func.py new file mode 100644 index 0000000000..2492f3b178 --- /dev/null +++ b/tests/functional/modules/test_zos_started_task_func.py @@ -0,0 +1,1116 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright (c) IBM Corporation 2025 +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name +from shellescape import quote +import re + +TMP_DIRECTORY = "/tmp/" +PROC_PDS = "USER.PRIVATE.PROCLIB" +TASK_JCL_CONTENT="""//STEP1 EXEC PGM=BPXBATCH +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//STDPARM DD * +SH sleep 600 +/*""" +PARAM_JCL_CONTENT="""//MSLEEP PROC SECS=10 +//STEP1 EXEC PGM=BPXBATCH, +// PARM='SH sleep &SECS' +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//""" + +# Input arguments validation +def test_start_task_with_invalid_member(ansible_zos_module): + hosts = ansible_zos_module + # Check with non-existing member + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMTASK" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("msg") is not None + # Validating with member name more than 8 chars + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPLETASK" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("stderr") is not None + +def test_start_task_with_jobname_identifier(ansible_zos_module): + hosts = ansible_zos_module + # validate jobname and identifier with non-existing member + start_results = hosts.all.zos_started_task( + state = "started", + 
member_name = "SAMPLE", + job_name = "SAMTASK", + identifier = "TESTER" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +def test_start_task_with_invalid_identifier(ansible_zos_module): + hosts = ansible_zos_module + # validate using invalid identifier + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPTASK", + identifier = "$HELLO" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("stderr") is not None + assert result.get("msg") is not None + + # validate using proper identifier and non-existing member + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPLE", + identifier = "HELLO" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE.HELLO" + assert result.get("msg") is not None + +def test_start_task_with_invalid_jobaccount(ansible_zos_module): + hosts = ansible_zos_module + job_account = "(T043JM,JM00,1,0,0,This is the invalid job account information to test negative scenario)" + # validate invalid job_account with non-existing member + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPLE", + job_account = job_account + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +# def test_start_task_with_invalid_devicenum(ansible_zos_module): +# hosts = ansible_zos_module +# # validate invalid devicenum with non-existing member +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# device_number = "0870" +# ) +# for result in start_results.contacted.values(): 
+# assert result.get("changed") is False +# assert result.get("failed") is True +# assert result.get("msg") is not None + +# def test_start_task_with_invalid_volumeserial(ansible_zos_module): +# hosts = ansible_zos_module +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# volume = "12345A" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("stderr") is not None +# assert result.get("cmd") == "S SAMPLE.SAMPLE,,12345A" +# assert result.get("msg") is not None + +def test_start_task_with_invalid_parameters(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPLE", + parameters = ["KEY1"] + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE,,,'KEY1'" + assert result.get("msg") is not None + + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPLE", + parameters = ["KEY1", "KEY2", "KEY3"] + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "S SAMPLE,,,(KEY1,KEY2,KEY3)" + assert result.get("msg") is not None + +# def test_start_task_with_devicenum_devicetype_negative(ansible_zos_module): +# hosts = ansible_zos_module +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# device_number = "/0870", +# device_type = "TEST" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("failed") is True +# assert result.get("msg") is not None + + +def test_start_task_with_invalid_subsystem_negative(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + state = "started", + 
member_name = "VLF", + subsystem = "MSTRS" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + +def test_start_task_with_invalid_keywordparams_negative(ansible_zos_module): + hosts = ansible_zos_module + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "VLF", + keyword_parameters = { + "key1key1key1key1key1key1key1key1": "value1value1value1value1value1value1" + } + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "VLF", + keyword_parameters = { + "key1key1key1key1key1key1key1key1key1key1key1key1": "value1" + } + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "VLF", + keyword_parameters = { + "KEY1": "VALUE1", + "KEY2": "VALUE2" + } + ) + for result in start_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == 'S VLF,KEY1=VALUE1,KEY2=VALUE2' + assert result.get("msg") is not None + assert result.get("verbose_output") == "" + + +# def test_start_task_using_nonexisting_devicenum_negative(ansible_zos_module): +# hosts = ansible_zos_module +# start_results = hosts.all.zos_started_task( +# state = "started", +# member_name = "SAMPLE", +# device_number = "/ABCD" +# ) +# for result in start_results.contacted.values(): +# assert result.get("changed") is False +# assert result.get("stderr") is not None +# assert result.get("cmd") == 'S SAMPLE.SAMPLE,/ABCD' +# assert result.get("msg") is not None +# assert result.get("verbose_output") 
== "" + +def test_display_task_negative(ansible_zos_module): + hosts = ansible_zos_module + display_results = hosts.all.zos_started_task( + state = "displayed", + identifier = "SAMPLE" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + +def test_stop_task_negative(ansible_zos_module): + hosts = ansible_zos_module + stop_results = hosts.all.zos_started_task( + state = "stopped", + job_name = "SAMPLE" + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("stderr") is not None + assert result.get("msg") is not None + + stop_results = hosts.all.zos_started_task( + state = "stopped", + job_name = "TESTER", + identifier = "SAMPLE" + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "P TESTER.SAMPLE" + assert result.get("msg") is not None + +def test_modify_task_negative(ansible_zos_module): + hosts = ansible_zos_module + modify_results = hosts.all.zos_started_task( + state = "modified", + identifier = "SAMPLE" + ) + for result in modify_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + modify_results = hosts.all.zos_started_task( + state = "modified", + job_name = "TESTER" + ) + for result in modify_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + modify_results = hosts.all.zos_started_task( + state = "modified", + job_name = "TESTER", + identifier = "SAMPLE", + parameters = ["REPLACE", "VX=10"] + ) + for result in modify_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert 
result.get("cmd") == "F TESTER.SAMPLE,REPLACE,VX=10" + assert result.get("msg") is not None + +def test_cancel_task_negative(ansible_zos_module): + hosts = ansible_zos_module + cancel_results = hosts.all.zos_started_task( + state = "cancelled", + identifier = "SAMPLE" + ) + for result in cancel_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + cancel_results = hosts.all.zos_started_task( + state = "cancelled", + job_name = "TESTER", + identifier = "SAMPLE" + ) + for result in cancel_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "C TESTER.SAMPLE" + assert result.get("verbose_output") == "" + assert result.get("msg") is not None + + cancel_results = hosts.all.zos_started_task( + state = "cancelled", + asidx = "0012", + userid = "OMVSTEST", + dump = True, + verbose=True + ) + for result in cancel_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "C U=OMVSTEST,A=0012,DUMP" + assert result.get("verbose_output") != "" + assert result.get("msg") is not None + cancel_results = hosts.all.zos_started_task( + state = "cancelled", + userid = "OMVSADM", + armrestart = True + ) + for result in cancel_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + +def test_force_task_negative(ansible_zos_module): + hosts = ansible_zos_module + force_results = hosts.all.zos_started_task( + state = "forced", + identifier = "SAMPLE" + ) + for result in force_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + force_results = hosts.all.zos_started_task( + state = "forced", + job_name = "TESTER", + identifier = "SAMPLE" + ) + for 
result in force_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert result.get("cmd") == "FORCE TESTER.SAMPLE" + force_results = hosts.all.zos_started_task( + state = "forced", + userid = "OMVSADM", + armrestart = True + ) + for result in force_results.contacted.values(): + assert result.get("changed") is False + assert result.get("failed") is True + assert result.get("msg") is not None + + # force_results = hosts.all.zos_started_task( + # state = "forced", + # job_name = "TESTER", + # retry_force = True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("failed") is True + # assert result.get("msg") is not None + # force_results = hosts.all.zos_started_task( + # state = "forced", + # job_name = "TESTER", + # tcb_address = "0006789", + # retry_force = True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("failed") is True + # assert result.get("msg") is not None + # force_results = hosts.all.zos_started_task( + # state = "forced", + # job_name = "TESTER", + # identifier = "SAMPLE", + # tcb_address = "000678", + # retry_force = True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("stderr") is not None + # assert result.get("cmd") == "FORCE TESTER.SAMPLE,TCB=000678,RETRY=YES" + # force_results = hosts.all.zos_started_task( + # state = "forced", + # userid = "OMVSTEST", + # tcb_address = "000678", + # retry_force = True, + # verbose=True + # ) + # for result in force_results.contacted.values(): + # assert result.get("changed") is False + # assert result.get("stderr") is not None + # assert result.get("cmd") == "FORCE U=OMVSTEST,TCB=000678,RETRY=YES" + # assert result.get("verbose_output") != "" + + +def test_start_and_cancel_zos_started_task(ansible_zos_module): + try: + hosts = 
ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} '{1}(SAMPLE)'".format(data_set_name, PROC_PDS) + ) + + start_results = hosts.all.zos_started_task( + state = "started", + member_name = "SAMPLE", + verbose=True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") != "" + + force_results = hosts.all.zos_started_task( + state = "forced", + task_name = "SAMPLE" + ) + for result in force_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert len(result.get("tasks")) > 0 + assert result.get("cmd") == "FORCE SAMPLE.SAMPLE" + assert result.get("msg") is not None + assert "CANCELABLE - ISSUE CANCEL BEFORE FORCE" in result.get("stderr") + + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_name = "SAMPLE" + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + + # validate identifier + start_results = hosts.all.zos_started_task( + state = "started", + member = "SAMPLE", + identifier = "TESTER", + reus_asid = True + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + assert result.get("cmd") == "S 
SAMPLE.TESTER,REUSASID=YES" + + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_name = "SAMPLE" + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is False + assert result.get("stderr") is not None + assert len(result.get("tasks")) == 0 + assert result.get("verbose_output") == "" + + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_name = "SAMPLE", + identifier = "TESTER" + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + + job_account = "(T043JM,JM00,1,0,0,)" + start_results = hosts.all.zos_started_task( + state = "started", + member = "SAMPLE", + job_account = job_account + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + start_results = hosts.all.zos_started_task( + state = "started", + member = "SAMPLE", + job_account = job_account + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + + display_result = hosts.all.zos_started_task( + state = "displayed", + task = "SAMPLE" + ) + for result in display_result.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + + display_output = list(display_result.contacted.values())[0].get("stdout") + asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + + stop_results = hosts.all.zos_started_task( + state = 
"cancelled", + task_name = "SAMPLE", + asidx = asid_val, + verbose=True + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") != "" + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_name = "SAMPLE", + verbose=True + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") != "" + + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + # hosts.all.shell( + # cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + # ) + +def test_start_with_jobname_and_cancel_zos_started_task(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(SAMPLE)'\"".format(data_set_name, PROC_PDS) + ) + + start_results = hosts.all.zos_started_task( + state = "started", + member = "SAMPLE", + job_name = "TESTTSK" + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task = "TESTTSK" + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + + 
finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(SAMPLE)'".format(PROC_PDS) + ) + +def test_stop_and_modify_with_vlf_task(ansible_zos_module): + hosts = ansible_zos_module + modify_results = hosts.all.zos_started_task( + state = "modified", + task = "VLF", + parameters = ["REPLACE" ,"NN=00"] + ) + for result in modify_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "F VLF.VLF,REPLACE,NN=00" + + display_result = hosts.all.zos_started_task( + state = "displayed", + task = "VLF" + ) + for result in display_result.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + + display_output = list(display_result.contacted.values())[0].get("stdout") + asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + + stop_results = hosts.all.zos_started_task( + state = "stopped", + task = "VLF", + asidx = asid_val + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"P VLF.VLF,A={asid_val}" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "VLF", + identifier = "TESTER", + subsystem = "MSTR" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + + modify_results = hosts.all.zos_started_task( + state = "modified", + task = "VLF", + identifier = "TESTER", + parameters = ["REPLACE" ,"NN=00"] + ) + for result in 
modify_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("cmd") == "F VLF.TESTER,REPLACE,NN=00" + + stop_results = hosts.all.zos_started_task( + state = "stopped", + task = "VLF", + identifier = "TESTER" + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + + start_results = hosts.all.zos_started_task( + state = "started", + member = "VLF", + subsystem = "MSTR" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert len(result.get("tasks")) > 0 + assert result.get("rc") == 0 + assert result.get("stderr") == "" + + +def test_starting_and_cancel_zos_started_task_with_params(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(TASK_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(SAMPLE2)'\"".format(data_set_name, PROC_PDS) + ) + + start_results = hosts.all.zos_started_task( + state = "started", + member = "SAMPLE2", + job_name = "SPROC", + verbose=True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") != "" + + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task = "SPROC" + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert 
result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(SAMPLE2)'".format(PROC_PDS) + ) + +def test_force_and_start_with_icsf_task(ansible_zos_module): + hosts = ansible_zos_module + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "ICSF" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "D A,ICSF" + assert len(result.get("tasks")) > 0 + + cancel_results = hosts.all.zos_started_task( + state = "cancelled", + task = "ICSF" + ) + for result in cancel_results.contacted.values(): + assert result.get("changed") is False + assert result.get("rc") == 1 + assert result.get("stderr") != "" + assert len(result.get("tasks")) > 0 + + asidx = result.get("tasks")[0].get("asidx") + force_results = hosts.all.zos_started_task( + state = "forced", + task = "ICSF", + identifier = "ICSF", + asidx = asidx, + arm = True + ) + for result in force_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"FORCE ICSF.ICSF,A={asidx},ARM" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "ICSF" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert result.get("cmd") == "S ICSF" + assert len(result.get("tasks")) > 0 + +def test_start_with_keyword_param_and_cancel_zos_started_task(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > 
{1}/SAMPLE".format(quote(PARAM_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(MSLEEP)'\"".format(data_set_name, PROC_PDS) + ) + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is False + assert result.get("rc") == 1 + assert len(result.get("tasks")) == 0 + assert result.get("stderr") != "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "MSLEEP", + keyword_parameters = {"SECS": "60"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "MSLEEP", + keyword_parameters = {"SECS": "80"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 2 + assert result.get("stderr") == "" + + display_output = list(display_results.contacted.values())[0].get("stdout") + asid_val = re.search(r"\bA=([^ \n\r\t]+)", display_output).group(1) + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task = "MSLEEP", + asidx = asid_val + ) + + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + display_results = 
hosts.all.zos_started_task( + state = "displayed", + task = "MSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(MSLEEP)'".format(PROC_PDS) + ) +def test_start_and_cancel_zos_started_task_using_task_id(ansible_zos_module): + try: + hosts = ansible_zos_module + data_set_name = get_tmp_ds_name() + temp_path = get_random_file_name(dir=TMP_DIRECTORY) + hosts.all.file(path=temp_path, state="directory") + + hosts.all.shell( + cmd="echo {0} > {1}/SAMPLE".format(quote(PARAM_JCL_CONTENT), temp_path) + ) + + hosts.all.shell( + cmd="dcp {0}/SAMPLE {1}".format(temp_path, data_set_name) + ) + + hosts.all.shell( + cmd="dcp {0} \"//'{1}(TSLEEP)'\"".format(data_set_name, PROC_PDS) + ) + display_results = hosts.all.zos_started_task( + state = "displayed", + task_id = "STCABCDEF" + ) + for result in display_results.contacted.values(): + assert result.get("changed") is False + assert result.get("rc") == 4 + assert result.get("msg") != "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "TSLEEP", + keyword_parameters = {"SECS": "60"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "TSLEEP", + keyword_parameters = {"SECS": "80"}, + verbose = True + ) + + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + + display_results = hosts.all.zos_started_task( + state 
= "displayed", + task = "TSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 2 + assert result.get("stderr") == "" + + display_output = list(display_results.contacted.values())[0].get("stdout") + task_id = re.search(r"\bWUID=([^ \n\r\t]+)", display_output).group(1) + stop_results = hosts.all.zos_started_task( + state = "cancelled", + task_id = task_id + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + display_results = hosts.all.zos_started_task( + state = "displayed", + task = "TSLEEP" + ) + + for result in display_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) == 1 + assert result.get("stderr") == "" + finally: + hosts.all.file(path=temp_path, state="absent") + hosts.all.shell( + cmd="drm {0}".format(data_set_name) + ) + hosts.all.shell( + cmd="mrm '{0}(TSLEEP)'".format(PROC_PDS) + ) + +def test_stop_and_force_with_ICSF_task_using_task_id(ansible_zos_module): + hosts = ansible_zos_module + display_result = hosts.all.zos_started_task( + state = "displayed", + task = "ICSF" + ) + for result in display_result.contacted.values(): + task_id = result.get('tasks')[0]['task_id'] + asid_val = result.get('tasks')[0]['asidx'] + assert result.get("changed") is True + assert result.get("rc") == 0 + assert result.get("stderr") == "" + assert len(result.get("tasks")) > 0 + assert result.get("verbose_output") == "" + stop_results = hosts.all.zos_started_task( + state = "stopped", + task_id = task_id + ) + for result in stop_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"P 
ICSF.ICSF,A={asid_val}" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "ICSF" + ) + for result in start_results.contacted.values(): + task_id = result.get('tasks')[0]['task_id'] + asid_val = result.get('tasks')[0]['asidx'] + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + modify_results = hosts.all.zos_started_task( + state = "modified", + task_id = task_id, + parameters = ["REFRESH"] + ) + for result in modify_results.contacted.values(): + task_id = result.get('tasks')[0]['task_id'] + asid_val = result.get('tasks')[0]['asidx'] + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + force_results = hosts.all.zos_started_task( + state = "forced", + task_id = task_id, + arm = True + ) + for result in force_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + assert result.get("cmd") == f"FORCE ICSF.ICSF,A={asid_val},ARM" + + start_results = hosts.all.zos_started_task( + state = "started", + member = "ICSF" + ) + for result in start_results.contacted.values(): + assert result.get("changed") is True + assert result.get("rc") == 0 + assert len(result.get("tasks")) > 0 + assert result.get("stderr") == "" + +# This testcase will be successful when a TSO session with user 'OMVSADM' is open. 
+# def test_cancel_using_userid(ansible_zos_module): +# hosts = ansible_zos_module +# display_results = hosts.all.zos_started_task( +# state = "cancelled", +# userid = "OMVSADM" +# ) +# for result in display_results.contacted.values(): +# print(result) +# assert result.get("changed") is True +# assert result.get("rc") == 0 +# assert result.get("stderr") == "" +# assert result.get("cmd") == "C U=OMVSADM" +# assert len(result.get("tasks")) > 0 diff --git a/tests/functional/modules/test_zos_tso_command_func.py b/tests/functional/modules/test_zos_tso_command_func.py index cacfd99c12..e2e62c2b23 100644 --- a/tests/functional/modules/test_zos_tso_command_func.py +++ b/tests/functional/modules/test_zos_tso_command_func.py @@ -23,6 +23,11 @@ def test_zos_tso_command_run_help(ansible_zos_module): for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == "help" assert result.get("changed") is True @@ -43,6 +48,11 @@ def test_zos_tso_command_long_command_128_chars(ansible_zos_module): for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == command_string[0] assert result.get("changed") is True @@ -56,43 +66,79 @@ def test_zos_tso_command_allocate_listing_delete(ansible_zos_module): results_allocate = hosts.all.zos_tso_command(commands=command_string) # Validate the correct allocation of dataset for result in results_allocate.contacted.values(): - for item in result.get("output"): + for index, item in enumerate(result.get("output")): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 
0 + assert item.get("stderr") == "" + assert item.get("stdout") == "" + assert item.get("command") == command_string[index] assert result.get("changed") is True # Validate listds of datasets and validate LISTDS using alias param 'command' of auth command - results = hosts.all.zos_tso_command(commands=[f"LISTDS '{default_temp_dataset}'"]) + cmd = f"LISTDS '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Validate LISTDS using alias param 'command' - results = hosts.all.zos_tso_command(command=f"LISTDS '{default_temp_dataset}'") + cmd = f"LISTDS '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(command=cmd) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Validate LISTCAT command and an unauth command + cmd = f"LISTCAT ENT('{default_temp_dataset}')" results = hosts.all.zos_tso_command( - commands=[f"LISTCAT ENT('{default_temp_dataset}')"] + commands=[cmd] ) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Validate remove dataset - results = hosts.all.zos_tso_command(commands=[f"delete '{default_temp_dataset}'"]) + cmd = f"delete '{default_temp_dataset}'" + results 
= hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True # Expect the tso_command to fail here because # the previous command will have already deleted the data set # Validate data set was removed by previous call - results = hosts.all.zos_tso_command(commands=[f"delete '{default_temp_dataset}'"]) + cmd = f"delete '{default_temp_dataset}'" + results = hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 8 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is False + assert result.get("failed") is True # The failure test @@ -103,18 +149,31 @@ def test_zos_tso_command_empty_command(ansible_zos_module): for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 255 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == "" assert result.get("changed") is False + assert result.get("failed") is True # The failure test # The input command is no-existing command, the module return rc 255. 
def test_zos_tso_command_invalid_command(ansible_zos_module): hosts = ansible_zos_module - results = hosts.all.zos_tso_command(commands=["xxxxxx"]) + cmd = "xxxxxx" + results = hosts.all.zos_tso_command(commands=[cmd]) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") == 255 - assert result.get("changed") is False + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == cmd + assert result.get("changed") is False + assert result.get("failed") is True + + # The positive test @@ -127,11 +186,13 @@ def test_zos_tso_command_multiple_commands(ansible_zos_module): commands_list = ["LU {0}".format(user), "LISTGRP"] results = hosts.all.zos_tso_command(commands=commands_list) for result in results.contacted.values(): - for item in result.get("output"): - if item.get("command") == "LU {0}".format(user): - assert item.get("rc") == 0 - if item.get("command") == "LISTGRP": - assert item.get("rc") == 0 + for index, item in enumerate(result.get("output")): + assert item.get("rc") == 0 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + assert item.get("command") == commands_list[index] assert result.get("changed") is True @@ -139,13 +200,19 @@ # The command that kicks off rc>0 which is allowed def test_zos_tso_command_maxrc(ansible_zos_module): hosts = ansible_zos_module + cmd = "LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC" results = hosts.all.zos_tso_command( - commands=["LISTDSD DATASET('HLQ.DATA.SET') ALL GENERIC"], + commands=[cmd], max_rc=4 ) for result in results.contacted.values(): for item in result.get("output"): assert item.get("rc") < 5 + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == 
"" + assert item.get("stdout") != "" + assert item.get("command") == cmd assert result.get("changed") is True @@ -158,25 +225,46 @@ def test_zos_tso_command_gds(ansible_zos_module): hosts.all.shell(cmd="dtouch -tseq '{0}(+1)' ".format(default_data_set)) print(f"data set name {default_data_set}") hosts = ansible_zos_module + cmd = """LISTDSD DATASET('{0}(0)') ALL GENERIC""".format(default_data_set) results = hosts.all.zos_tso_command( - commands=["""LISTDSD DATASET('{0}(0)') ALL GENERIC""".format(default_data_set)], + commands=[cmd], max_rc=4 ) for result in results.contacted.values(): for item in result.get("output"): assert result.get("changed") is True + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + # command has to be different because the GDS name gets resolved + assert item.get("command") != cmd + cmd = """LISTDSD DATASET('{0}(-1)') ALL GENERIC""".format(default_data_set) results = hosts.all.zos_tso_command( - commands=["""LISTDSD DATASET('{0}(-1)') ALL GENERIC""".format(default_data_set)], + commands=[cmd], max_rc=4 ) for result in results.contacted.values(): for item in result.get("output"): assert result.get("changed") is True + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + # command has to be different because the GDS name gets resolved + assert item.get("command") != cmd + cmd = """LISTDS '{0}(-1)'""".format(default_data_set) results = hosts.all.zos_tso_command( - commands=["""LISTDS '{0}(-1)'""".format(default_data_set)] + commands=[cmd] ) for result in results.contacted.values(): + for item in result.get("output"): + assert item.get("line_count") > 0 + assert len(item.get("stdout_lines")) > 0 + assert item.get("stderr") == "" + assert item.get("stdout") != "" + # command has to be different because the GDS name gets resolved + assert item.get("command") 
!= cmd assert result.get("changed") is True finally: - None - # hosts.all.shell(cmd="drm ANSIBLE.*".format(default_data_set)) \ No newline at end of file + hosts.all.shell(cmd=f"drm -f {default_data_set}") \ No newline at end of file diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 5b4aff3dfd..1c97815549 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -62,7 +62,7 @@ ibm.ibm_zos_core.zos_unarchive: src: {3} format: - name: {4} + type: {4} remote_src: True async: 45 poll: 0 @@ -98,17 +98,11 @@ def create_multiple_data_sets(ansible_zos_module, base_name, n, ds_type, ): for i in range(n): curr_ds = { "name":base_name+str(i), - "type":ds_type, - "state":"present", - "replace":True, - "force":True } + ansible_zos_module.all.shell(cmd=f"dtouch -t{ds_type} '{base_name+str(i)}'") test_data_sets.append(curr_ds) # Create data sets in batch - ansible_zos_module.all.zos_data_set( - batch=test_data_sets - ) return test_data_sets @@ -117,15 +111,9 @@ def create_multiple_members(ansible_zos_module, pds_name, member_base_name, n): for i in range(n): curr_ds = { "name":f"{pds_name}({member_base_name}{i})", - "type":"member", - "state":"present", - "replace":True, - "force":True } + ansible_zos_module.all.shell(cmd=f"decho '' '{pds_name}({member_base_name}{i})'") test_members.append(curr_ds) - ansible_zos_module.all.zos_data_set( - batch=test_members - ) return test_members @@ -159,7 +147,7 @@ def test_uss_unarchive(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -168,7 +156,7 @@ def test_uss_unarchive(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, ) @@ -177,6 +165,8 @@ def test_uss_unarchive(ansible_zos_module, ds_format): for 
result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -198,7 +188,7 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) uss_files = [file[len(USS_TEMP_DIR)+1:] for file in USS_TEST_FILES] @@ -209,7 +199,7 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, include=include_list, remote_src=True, @@ -218,6 +208,8 @@ def test_uss_unarchive_include(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -242,7 +234,7 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -253,7 +245,7 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, exclude=exclude_list, remote_src=True, @@ -261,6 +253,9 @@ def test_uss_unarchive_exclude(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False + assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # 
Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -285,7 +280,7 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -294,7 +289,7 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, ) @@ -302,6 +297,8 @@ def test_uss_unarchive_list(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None for file in USS_TEST_FILES.keys(): assert file[len(USS_TEMP_DIR)+1:] in result.get("targets") finally: @@ -322,7 +319,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) for file in list(USS_TEST_FILES.keys()): @@ -330,7 +327,7 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, mode=dest_mode, @@ -338,6 +335,8 @@ def test_uss_single_unarchive_with_mode(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None dest_files = list(USS_TEST_FILES.keys()) for file in dest_files: stat_dest_res = hosts.all.stat(path=file) @@ -372,7 +371,7 @@ def test_uss_unarchive_copy_to_remote(ansible_zos_module): src=tar_file, dest=USS_TEMP_DIR, format={ - "name":"tar" + "type":"tar" }, force=True, ) @@ -380,6 +379,8 @@ 
def test_uss_unarchive_copy_to_remote(ansible_zos_module): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}/{tmp_file.name}") for c_result in cmd_result.contacted.values(): @@ -406,7 +407,7 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -416,7 +417,7 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, encoding= encoding, @@ -426,6 +427,8 @@ def test_uss_unarchive_encoding(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -448,7 +451,7 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): src=list(USS_TEST_FILES.keys()), dest=dest, format={ - "name":ds_format + "type":ds_format } ) # remove files @@ -465,7 +468,7 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): unarchive_result = hosts.all.zos_unarchive( src=dest, format={ - "name":ds_format + "type":ds_format }, remote_src=True, encoding= encoding, @@ -475,6 +478,8 @@ def test_uss_unarchive_encoding_skip_encoding(ansible_zos_module, ds_format): for result in unarchive_result.contacted.values(): assert result.get("failed", False) is False assert result.get("changed") is True + assert 
result.get("targets") is not None + assert result.get("missing") is not None # Command to assert the file is in place cmd_result = hosts.all.shell(cmd=f"ls {USS_TEMP_DIR}") for c_result in cmd_result.contacted.values(): @@ -543,26 +548,13 @@ def test_mvs_unarchive_single_data_set( mvs_dest_archive = get_tmp_ds_name() dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -577,11 +569,11 @@ def test_mvs_unarchive_single_data_set( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type": ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack": True } archive_result = hosts.all.zos_archive( src=dataset, @@ -603,14 +595,15 @@ def test_mvs_unarchive_single_data_set( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + } # Unarchive action unarchive_result = 
hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, @@ -623,6 +616,8 @@ def test_mvs_unarchive_single_data_set( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -634,8 +629,8 @@ def test_mvs_unarchive_single_data_set( for result in cat_result.contacted.values(): assert result.get("stdout") == test_line finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") + hosts.all.shell(cmd=f"drm '{mvs_dest_archive}'") @pytest.mark.ds @pytest.mark.parametrize( @@ -677,26 +672,13 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( mvs_dest_archive = get_tmp_ds_name() dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -711,13 
+693,13 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, dest=mvs_dest_archive, @@ -732,14 +714,18 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True ) @@ -747,14 +733,16 @@ def test_mvs_unarchive_single_data_set_use_adrdssu( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") + hosts.all.shell(cmd=f"drm '{mvs_dest_archive}'") @pytest.mark.ds @pytest.mark.parametrize( @@ -803,12 +791,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form 
hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -818,12 +806,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form dataset = dataset.replace("$", "/$") hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=True ) @@ -832,6 +824,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): @@ -839,8 +833,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu(ansible_zos_module, ds_form assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f""" drm '{dataset}*' """) + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -893,12 +887,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( 
hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -911,13 +905,17 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( # remote data_sets from host hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action include_ds = f"{dataset}0" unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, include=[include_ds], ) @@ -927,6 +925,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlqua}.*" """) for c_result in cmd_result.contacted.values(): @@ -938,8 +938,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_include( assert target_ds.get("name") not in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -992,12 +992,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( hosts.all.shell(cmd="decho '{0}' 
\"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1007,13 +1007,17 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( # remote data_sets from host hosts.all.shell(cmd=f""" drm "{dataset}*" """) - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action exclude_ds = f"{dataset}0" unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, exclude=[exclude_ds], ) @@ -1022,6 +1026,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f""" dls "{hlqua}.*" """) for c_result in cmd_result.contacted.values(): @@ -1033,8 +1039,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_exclude( assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1083,12 +1089,12 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat hosts.all.shell(cmd="decho '{0}' 
\"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1098,12 +1104,16 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat # remote data_sets from host hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, list=True ) @@ -1112,6 +1122,8 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat assert result.get("changed") is False assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): @@ -1119,8 +1131,8 @@ def test_mvs_unarchive_multiple_data_set_list(ansible_zos_module, ds_format, dat assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") not in c_result.get("stdout") finally: - hosts.all.shell(cmd=f"""drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1184,24 +1196,28 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( hosts.all.shell(cmd="decho '{0}' 
\"{1}\"".format(test_line, ds.get("name"))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, format=format_dict, ) - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, force=force ) @@ -1211,6 +1227,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlqua}.*" """) for c_result in cmd_result.contacted.values(): @@ -1221,8 +1239,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_force( assert result.get("changed") is False assert result.get("failed", False) is True finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @pytest.mark.parametrize( @@ -1265,25 +1283,13 @@ def test_mvs_unarchive_single_data_set_remote_src( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" tmp_folder = tempfile.TemporaryDirectory(prefix="tmpfetch") - # Clean env - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, 
- type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present" - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") # Write some content into src the same size of the record, # need to reduce 4 from V and VB due to RDW if record_format in ["v", "vb"]: @@ -1298,13 +1304,13 @@ def test_mvs_unarchive_single_data_set_remote_src( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } - format_dict["format_options"] = { - "use_adrdssu":True + format_dict["options"] = { + "adrdssu":True } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") + format_dict["options"].update(spack=True) archive_result = hosts.all.zos_archive( src=dataset, dest=mvs_dest_archive, @@ -1318,30 +1324,36 @@ def test_mvs_unarchive_single_data_set_remote_src( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f"drm '{dataset}'") # fetch archive data set into tmp folder fetch_result = hosts.all.zos_fetch( src=mvs_dest_archive, dest=tmp_folder.name, - is_binary=True + binary=True ) for res in fetch_result.contacted.values(): source_path = res.get("dest") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=source_path, - format=format_dict, + format=unarchive_format_dict, remote_src=False, ) for result in 
unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) @@ -1355,8 +1367,8 @@ def test_mvs_unarchive_single_data_set_remote_src( finally: - hosts.all.shell(cmd=f"drm {dataset}*") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") tmp_folder.cleanup() @@ -1378,9 +1390,6 @@ def test_mvs_unarchive_single_data_set_remote_src( }, ] ) -@pytest.mark.parametrize( - "record_length", [80] -) @pytest.mark.parametrize( "encoding", [ {"from": "IBM-1047", "to": "ISO8859-1"}, @@ -1390,7 +1399,6 @@ def test_mvs_unarchive_encoding( ansible_zos_module, ds_format, data_set, - record_length, encoding ): try: @@ -1399,26 +1407,14 @@ def test_mvs_unarchive_encoding( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" record_format = "fb" - # Clean env - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + record_length = 80 # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") test_line = "a" * record_length for member in data_set.get("members"): if member == "": @@ -1428,11 +1424,11 @@ def 
test_mvs_unarchive_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=dataset, @@ -1454,20 +1450,21 @@ def test_mvs_unarchive_encoding( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}' ") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, "type":data_set.get("dstype"), "record_format":record_format, - "record_length":record_length + "record_length":record_length }, encoding=encoding, ) @@ -1475,14 +1472,16 @@ def test_mvs_unarchive_encoding( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @@ -1524,26 +1523,13 @@ def test_mvs_unarchive_encoding_skip_encoding( dataset = get_tmp_ds_name(3) hlq = "ANSIBLE" record_format = "fb" - # Clean env - 
hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") # Create source data set - hosts.all.zos_data_set( - name=dataset, - type=data_set.get("dstype"), - state="present", - record_length=record_length, - record_format=record_format, - replace=True - ) + hosts.all.shell(cmd=f"dtouch -t{data_set.get('dstype')} -l{record_length} -r{record_format} '{dataset}'") # Create members if needed if data_set.get("dstype") in ["pds", "pdse"]: for member in data_set.get("members"): - hosts.all.zos_data_set( - name=f"{dataset}({member})", - type="member", - state="present", - replace=True - ) + # This creates empty members + hosts.all.shell(cmd=f"decho '' '{dataset}({member})'") test_line = "a" * record_length for member in data_set.get("members"): if member == "": @@ -1553,11 +1539,11 @@ def test_mvs_unarchive_encoding_skip_encoding( hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_to_write}\"") format_dict = { - "name":ds_format + "type":ds_format } if ds_format == "terse": - format_dict["format_options"] = { - "terse_pack":"spack" + format_dict["options"] = { + "spack":True } archive_result = hosts.all.zos_archive( src=dataset, @@ -1579,12 +1565,13 @@ def test_mvs_unarchive_encoding_skip_encoding( for c_result in cmd_result.contacted.values(): assert mvs_dest_archive in c_result.get("stdout") - hosts.all.zos_data_set(name=dataset, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}' ") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + } - #skipping some files to encode + # skipping some files to encode skip_encoding_list = [dataset] current_encoding_config = encoding.copy() current_encoding_config["skip_encoding"] = skip_encoding_list @@ -1592,13 +1579,13 @@ def test_mvs_unarchive_encoding_skip_encoding( # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + format=unarchive_format_dict, remote_src=True, dest_data_set={ "name":dataset, 
"type":data_set.get("dstype"), "record_format":record_format, - "record_length":record_length + "record_length":record_length }, encoding=encoding, ) @@ -1606,14 +1593,16 @@ def test_mvs_unarchive_encoding_skip_encoding( for result in unarchive_result.contacted.values(): assert result.get("changed") is True assert result.get("failed", False) is False + assert result.get("targets") is not None + assert result.get("missing") is not None # assert result.get("dest") == mvs_dest_archive # assert data_set.get("name") in result.get("archived") cmd_result = hosts.all.shell(cmd = f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): assert dataset in c_result.get("stdout") finally: - hosts.all.zos_data_set(name=dataset, state="absent") - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") @pytest.mark.ds @@ -1668,12 +1657,12 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds.get("name").replace('$', '\\$'))) format_dict = { - "name":ds_format, - "format_options":{} + "type":ds_format, + "options":{} } if ds_format == "terse": - format_dict["format_options"].update(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) + format_dict["options"].update(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=f"{dataset}*", dest=mvs_dest_archive, @@ -1683,12 +1672,16 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module dataset = dataset.replace("$", "/$") hosts.all.shell(cmd=f"drm {dataset}*") - if ds_format == "terse": - del format_dict["format_options"]["terse_pack"] + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True, + } + } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=mvs_dest_archive, - format=format_dict, + 
format=unarchive_format_dict, remote_src=True, force=True, encoding=encoding @@ -1698,6 +1691,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module assert result.get("changed") is True assert result.get("failed", False) is False assert result.get("src") == mvs_dest_archive + assert result.get("targets") is not None + assert result.get("missing") is not None cmd_result = hosts.all.shell(cmd=f"""dls "{hlq}.*" """) for c_result in cmd_result.contacted.values(): @@ -1705,8 +1700,8 @@ def test_mvs_unarchive_multiple_data_set_use_adrdssu_encoding(ansible_zos_module assert target_ds.get("name") in result.get("targets") assert target_ds.get("name") in c_result.get("stdout") finally: - hosts.all.shell(cmd=f""" drm "{dataset}*" """) - hosts.all.zos_data_set(name=mvs_dest_archive, state="absent") + hosts.all.shell(cmd=f" drm '{dataset}*' ") + hosts.all.shell(cmd=f" drm '{mvs_dest_archive}*' ") def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): @@ -1716,17 +1711,17 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): # False path source_path = "/tmp/OMVSADM.NULL" - format_dict = { - "name":'terse' + unarchive_format_dict = { + "type":'terse' } - format_dict["format_options"] = { - "use_adrdssu":True + unarchive_format_dict["options"] = { + "adrdssu":True } # Unarchive action unarchive_result = hosts.all.zos_unarchive( src=source_path, - format=format_dict, + format=unarchive_format_dict, remote_src=False, ) @@ -1738,24 +1733,26 @@ def test_mvs_unarchive_fail_copy_remote_src(ansible_zos_module): @pytest.mark.ds @pytest.mark.parametrize( - "format", [ + "ds_format", [ "terse", "xmit", ]) @pytest.mark.parametrize("dstype", ["seq", "pds", "pdse"]) -def test_gdg_unarchive(ansible_zos_module, dstype, format): +def test_gdg_unarchive(ansible_zos_module, dstype, ds_format): try: HLQ = "ANSIBLE" hosts = ansible_zos_module data_set_name = get_tmp_ds_name(symbols=True) archive_data_set = get_tmp_ds_name(symbols=True) - results 
= hosts.all.zos_data_set( - batch = [ - { "name":data_set_name, "state":"present", "type":"gdg", "limit":3}, - { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, - { "name":f"{data_set_name}(+1)", "state":"present", "type":dstype}, - ] - ) + results = hosts.all.shell(cmd=f"dtouch -tGDG -L3 '{data_set_name}'") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.shell(cmd=f"dtouch -t{dstype} '{data_set_name}(+1)'") for result in results.contacted.values(): assert result.get("changed") is True assert result.get("module_stderr") is None @@ -1778,12 +1775,10 @@ def test_gdg_unarchive(ansible_zos_module, dstype, format): for ds in ds_to_write: hosts.all.shell(cmd="decho '{0}' \"{1}\"".format(test_line, ds)) - format_dict = dict(name=format, format_options=dict()) - if format == "terse": - format_dict["format_options"] = dict(terse_pack="spack") - format_dict["format_options"].update(use_adrdssu=True) - if format == "terse": - del format_dict["format_options"]["terse_pack"] + format_dict = dict(type=ds_format, options=dict()) + if ds_format == "terse": + format_dict["options"] = dict(spack=True) + format_dict["options"].update(adrdssu=True) archive_result = hosts.all.zos_archive( src=[f"{data_set_name}(0)",f"{data_set_name}(-1)" ], dest=archive_data_set, @@ -1797,16 +1792,16 @@ def test_gdg_unarchive(ansible_zos_module, dstype, format): cmd_result = hosts.all.shell(cmd = """dls "{0}.*" """.format(HLQ)) for c_result in cmd_result.contacted.values(): assert archive_data_set in c_result.get("stdout") - - hosts.all.zos_data_set( - batch=[ - {"name": f"{data_set_name}(-1)", "state": "absent"}, - {"name": f"{data_set_name}(0)", "state": "absent"}, - ] - ) + 
hosts.all.shell(cmd=f"drm '{data_set_name}(-1)' && drm '{data_set_name}(0)'") + unarchive_format_dict = { + "type": ds_format, + "options": { + "adrdssu": True + } + } unarchive_result = hosts.all.zos_unarchive( src=archive_data_set, - format=format_dict, + format=unarchive_format_dict, remote_src=True ) for result in unarchive_result.contacted.values(): @@ -1846,7 +1841,7 @@ def test_zos_unarchive_async(ansible_zos_module, get_config): archive_result = hosts_zos.all.zos_archive(src=list(USS_TEST_FILES.keys()), dest=dest, format=dict( - name=archive_format + type=archive_format )) # remove files for file in USS_TEST_FILES.keys(): diff --git a/tests/functional/modules/test_zos_volume_init_func.py b/tests/functional/modules/test_zos_volume_init_func.py index 8f7f58bfe4..f5620d351c 100644 --- a/tests/functional/modules/test_zos_volume_init_func.py +++ b/tests/functional/modules/test_zos_volume_init_func.py @@ -490,7 +490,7 @@ def test_bad_param_volid_value_too_long(ansible_zos_module, volumes_unit_on_syst hosts.all.zos_operator(cmd=f"vary {address},online") -# Note - volume needs to be sms managed for zos_data_set to work. Possible +# Note - volume needs to be sms managed for data set creation to work. 
Possible # points of failure are: # unable to init volume first time around # unable to allocate data set @@ -530,7 +530,7 @@ def test_no_existing_data_sets_check(ansible_zos_module, volumes_unit_on_systems hosts.all.zos_operator(cmd=f"vary {address},online") # allocate data set to volume - hosts.all.zos_data_set(name=dataset, type='pds', volumes=volume) + hosts.all.shell(cmd=f"dtouch -tpds -V{volume} '{dataset}'") # take volume back offline hosts.all.zos_operator(cmd=f"vary {address},offline") @@ -550,7 +550,7 @@ def test_no_existing_data_sets_check(ansible_zos_module, volumes_unit_on_systems hosts.all.zos_operator(cmd=f"vary {address},online") # remove data set - hosts.all.zos_data_set(name=dataset, state='absent') + hosts.all.shell(cmd=f"drm '{dataset}'") # Note - technically verify_offline is not REQUIRED but it defaults to True diff --git a/tests/helpers/users.py b/tests/helpers/users.py index 4b370faef9..a28681a6ab 100644 --- a/tests/helpers/users.py +++ b/tests/helpers/users.py @@ -734,7 +734,7 @@ def _create_managed_user(self, managed_user: ManagedUserType) -> Tuple[str, str] try: cmd=f"{add_user_cmd.getvalue()}" - # need to connect with ssh -i /tmp/UPGLSFLH/id_rsa UPGLSFLH@ec01136a.vmec.svl.ibm.com + # need to connect with ssh -i /tmp/UPGLSFLH/id_rsa UPGLSFLH@xyz.com add_user_attributes = self._connect(self._remote_host, self._model_user,cmd) # Because this is a tsocmd run through shell, any user with a $ will be expanded and thus truncated, you can't change diff --git a/tests/helpers/volumes.py b/tests/helpers/volumes.py index c42df75289..bd5029e80e 100644 --- a/tests/helpers/volumes.py +++ b/tests/helpers/volumes.py @@ -72,6 +72,16 @@ def get_available_vol_addr(self): print("Not more volumes in disposal return volume USER02") return "USER02","01A2" + def get_available_vol_with_sms(self): + """ Check in the list of volumes one in use or not, also send a default + volume 222222 as it is the one with less data sets included.""" + for volume in self.volumes: + 
if not (volume.in_use): + volume.use() + return volume.name, volume.unit + print("Not more volumes in disposal return volume 222222") + return "222222","DB2SMS10" + def free_vol(self, vol): """ Check from the array the volume is already free for other test to use.""" for volume in self.volumes: @@ -114,7 +124,9 @@ def get_volumes(ansible_zos_module, path): storage_online.append(vol_w_info[3]) # Insert a volumes for the class ls_Volumes to give flag of in_use and correct manage for vol in storage_online: - list_volumes.append(vol) + valid = validate_ds_creation_on_volume(hosts, vol, 'seq') + if valid: + list_volumes.append(vol) if prefer_vols is not None: list(map(str, prefer_vols)) prefer_vols.extend(list_volumes) @@ -175,7 +187,7 @@ def create_vvds_on_volume( ansible_zos_module, volume): return False -def get_volume_and_unit(ansible_zos_module, path): +def get_volume_and_unit(ansible_zos_module): """Get an array of available volumes, and it's unit""" # Using the command d u,dasd,online to fill an array of available volumes with the priority # of of actives (A) and storage (STRG) first then online (O) and storage and if is needed, the @@ -184,7 +196,6 @@ def get_volume_and_unit(ansible_zos_module, path): hosts = ansible_zos_module list_volumes = [] all_volumes_list = [] - priv_online = [] flag = False iteration = 5 volumes_datasets = [] @@ -208,18 +219,8 @@ def get_volume_and_unit(ansible_zos_module, path): if len(vol_w_info)>3: if vol_w_info[2] == 'O' and "USER" in vol_w_info[3] and vol_w_info[4] == "PRIV/RSDNT": - # The next creation of dataset is to validate if the volume will work properly for the test suite - dataset = get_tmp_ds_name() - valid_creation = hosts.all.zos_data_set(name=dataset, type='pds', volumes=f'{vol_w_info[3]}') - - for valid in valid_creation.contacted.values(): - if valid.get("changed") == "false": - valid = False - else: - valid = True - hosts.all.zos_data_set(name=dataset, state="absent") - + valid = validate_ds_creation_on_volume(hosts, 
vol_w_info[3], "pds") # When is a valid volume is required to get the datasets present on the volume if valid: ds_on_vol = hosts.all.shell(cmd=f"vtocls {vol_w_info[3]}") @@ -232,3 +233,63 @@ list_volumes = [[x[1], x[2]] for x in sorted_volumes] return list_volumes + +def get_volumes_sms_mgmt_class(ansible_zos_module, volumes_on_system): + """ + From the current volumes available to write and delete data sets, search for any SMS group that is associated with them. + """ + volumes_smsclass = find_volume_with_sms_class(ansible_zos_module, volumes_on_system) + if len(volumes_smsclass) > 0: + return volumes_smsclass + + volumes_smsclass = [] + print("Warning: No sms storage volumes on system, using DB2SMS10") + for vol in volumes_on_system: + volumes_smsclass.append([vol,'DB2SMS10']) + return volumes_smsclass + + +def find_volume_with_sms_class(ansible_zos_module, volumes_on_system): + """ + Fetches all volumes in the system and returns a list of volumes for + which there is an SMS class. 
+ """ + hosts = ansible_zos_module + vols_sms = [] + content = "" + # D SMS,STORGRP(SG1),LISTVOL + # D SMS,STORGRP(ALL),LISTVOL + # D SMS,STORGRP(),LISTVOL + # D SMS,VOL(XXXXXX) + for vol in volumes_on_system: + response = hosts.all.zos_operator(cmd=f"D SMS,VOL({vol})") + for res in response.contacted.values(): + content = res.get('content') + for line in content: + if 'REJECTED' in line or 'EC' in line: + continue + else: + words = line.lstrip() + if words.startswith(vol): + sms_grp = words.strip().split()[-1] + if sms_grp != "PRIMARY": + vols_sms.append([vol, sms_grp]) + continue + return vols_sms + + +def validate_ds_creation_on_volume(ansible_zos_module, vol, type): + """ + Utility to validate the volumes we get from the system is available to create and delete datasets + """ + valid = True + hosts = ansible_zos_module + dataset = get_tmp_ds_name() + valid_creation = hosts.all.zos_data_set(name=dataset, type=type, volumes=vol) + for valid in valid_creation.contacted.values(): + if valid.get("changed") == "false": + valid = False + else: + valid = True + hosts.all.zos_data_set(name=dataset, state="absent") + return valid diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt index b8349e60d2..be5ebb4e7a 100644 --- a/tests/sanity/ignore-2.15.txt +++ b/tests/sanity/ignore-2.15.txt @@ -25,3 +25,4 @@ plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt index ab5f45a7cb..29abd16b32 100644 --- a/tests/sanity/ignore-2.16.txt +++ 
b/tests/sanity/ignore-2.16.txt @@ -26,3 +26,4 @@ plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git a/tests/sanity/ignore-2.17.txt b/tests/sanity/ignore-2.17.txt index d682068ed5..874f4f684c 100644 --- a/tests/sanity/ignore-2.17.txt +++ b/tests/sanity/ignore-2.17.txt @@ -26,3 +26,4 @@ plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Lice plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 diff --git a/tests/sanity/ignore-2.18.txt b/tests/sanity/ignore-2.18.txt index 52cbed2aa3..048fb8e797 100644 --- a/tests/sanity/ignore-2.18.txt +++ b/tests/sanity/ignore-2.18.txt @@ -25,4 +25,5 @@ plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Lic plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 -plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file +plugins/modules/zos_zfs_resize.py 
validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git a/tests/sanity/ignore-2.19.txt b/tests/sanity/ignore-2.19.txt new file mode 100644 index 0000000000..048fb8e797 --- /dev/null +++ b/tests/sanity/ignore-2.19.txt @@ -0,0 +1,29 @@ +plugins/modules/zos_apf.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_backup_restore.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_blockinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_copy.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_data_set.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_encode.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_fetch.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_find.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_output.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_job_submit.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_lineinfile.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mount.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_mvs_raw.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_operator_action_query.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 
+plugins/modules/zos_ping.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_ping.rexx validate-modules:invalid-extension # Opened issue https://github.com/ansible/ansible/issues/79784 +plugins/modules/zos_replace.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_script.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_stat.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_tso_command.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_gather_facts.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_volume_init.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_archive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_unarchive.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_zfs_resize.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 +plugins/modules/zos_started_task.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0 \ No newline at end of file diff --git a/tests/unit/test_zos_backup_restore_unit.py b/tests/unit/test_zos_backup_restore_unit.py index c6e8ce9c85..37a3f9c41a 100644 --- a/tests/unit/test_zos_backup_restore_unit.py +++ b/tests/unit/test_zos_backup_restore_unit.py @@ -142,11 +142,14 @@ def test_valid_backup_name(zos_backup_restore_mocker, backup_name): ["MYIclASS", "M0341CSS", "storagec", "s", "$*@@%#1", "#"], ) def test_valid_sms_classes(zos_backup_restore_mocker, sms_class): + sms = { + "storage_class":sms_class, + "management_class":sms_class + } valid_args = dict( operation="backup", data_sets=dict(include="user.*"), - sms_storage_class=sms_class, - 
sms_management_class=sms_class, + sms=sms ) assert_args_valid(zos_backup_restore_mocker, valid_args) @@ -230,11 +233,14 @@ def test_invalid_backup_name(zos_backup_restore_mocker, backup_name): ["5555bad", "toolongclass", "bad!char", True, False, 100, 0, -1], ) def test_invalid_sms_classes(zos_backup_restore_mocker, sms_class): + sms = { + "storage_class":sms_class, + "management_class":sms_class + } valid_args = dict( operation="backup", data_sets=dict(include="user.*"), - sms_storage_class=sms_class, - sms_management_class=sms_class, + sms=sms ) assert_args_invalid(zos_backup_restore_mocker, valid_args) diff --git a/tests/unit/test_zos_operator_action_query_unit.py b/tests/unit/test_zos_operator_action_query_unit.py index c69a89a20b..16ff8e9577 100644 --- a/tests/unit/test_zos_operator_action_query_unit.py +++ b/tests/unit/test_zos_operator_action_query_unit.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) IBM Corporation 2019, 2020 +# Copyright (c) IBM Corporation 2019, 2025 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -32,18 +32,18 @@ dummy_dict2 = {"system": "mv2c"} -dummy_dict3 = {"message_id": "DFH*"} +dummy_dict3 = {"msg_id": "DFH*"} -dummy_dict4_uppercase = {"message_id": "DFH*", "system": "MV28"} +dummy_dict4_uppercase = {"msg_id": "DFH*", "system": "MV28"} -dummy_dict4_lowercase = {"message_id": "DFH*", "system": "mv28"} +dummy_dict4_lowercase = {"msg_id": "DFH*", "system": "mv28"} -dummy_dict5 = {"message_filter": {"filter": "^.*IMS.*$", "use_regex": True}} +dummy_dict5 = {"msg_filter": {"filter": "^.*IMS.*$", "use_regex": True}} -dummy_dict6 = {"system": "mv27", "message_id": "DFS*", "job_name": "IM5H*", "message_filter": {"filter": "IMS"}} +dummy_dict6 = {"system": "mv27", "msg_id": "DFS*", "job_name": "IM5H*", "msg_filter": {"filter": "IMS"}} -dummy_dict_invalid_message = {"message_id": "$$#$%#"} -dummy_dict_invalid_filter = {"message_filter": {"filter": "*IMS", "use_regex": True}} +dummy_dict_invalid_message = {"msg_id": "$$#$%#"} +dummy_dict_invalid_filter = {"msg_filter": {"filter": "*IMS", "use_regex": True}} dummy_dict_invalid_job_name = {"job_name": "IM5H123456"} dummy_dict_invalid_system = {"system": "mv2712345"}