From d3cc669669769d8d2118a5081a95f3d42e6348e2 Mon Sep 17 00:00:00 2001 From: Rich Parker Date: Fri, 30 Aug 2024 15:47:09 -0400 Subject: [PATCH] updated data set basics, submit query retrieve and zos_script The changes were to remove errors from zTrial systems --- .../data_set_basics/data_set_basics.yml | 361 ++++++------ .../submit_query_retrieve.yml | 527 +++++++++--------- zos_concepts/zos_script/zos_script.yml | 131 +++-- 3 files changed, 557 insertions(+), 462 deletions(-) mode change 100644 => 100755 zos_concepts/data_sets/data_set_basics/data_set_basics.yml mode change 100644 => 100755 zos_concepts/jobs/submit_query_retrieve/submit_query_retrieve.yml diff --git a/zos_concepts/data_sets/data_set_basics/data_set_basics.yml b/zos_concepts/data_sets/data_set_basics/data_set_basics.yml old mode 100644 new mode 100755 index 03f3bdba..d4323eb8 --- a/zos_concepts/data_sets/data_set_basics/data_set_basics.yml +++ b/zos_concepts/data_sets/data_set_basics/data_set_basics.yml @@ -1,180 +1,181 @@ -############################################################################### -# © Copyright IBM Corporation 2020, 2024 -############################################################################### -# This sample playbook demonstrates basic data set operations using modules -# included in the Red Hat Ansible Certified Content for IBM Z core collection. -# -# Usage: -# ansible-playbook -i -# -# Example: -# ansible-playbook -i inventories data_set_basics.yml -# ansible-playbook -i inventories data_set_basics.yml -v -# -# Additional facts for this playbook can be configured to override the defaults -# by reviewing the "Fact setting" section of this playbook, for example, -# `data_set_name` and `system_name`. -# -# Requirements: -# IBM z/OS core collection 1.2.0 or later -# -# Configure: -# tgt_tmp_dir - this is the USS directory on the target which will be written -# to for this example. -# -# Optional: -# data_set_name - this is the data set name that will be created during -# execution of this sample. -# pds_name - this is the pds name that will be used during execution of this -# sample. -# system_name - this is the system name that will be used during this example, -# determined by executing `uname -n` on the target. -############################################################################### - ---- - -- hosts: zos_host - collections: - - ibm.ibm_zos_core - gather_facts: false - vars: - tgt_tmp_dir: "/tmp" - environment: "{{ environment_vars }}" - - tasks: - # ########################################################################## - # Generate and set temporary names for data sets - # ########################################################################## - - name: Create temp sequential data set name - command: "mvstmp {{ ansible_user | upper }}" - register: tmp_ds_seq - - - name: Create temp PDS name - command: "mvstmp {{ ansible_user | upper }}" - register: tmp_ds_pds - - - name: Set names for sequential data set and pds for use by this sample - set_fact: - data_set_name: "{{ tmp_ds_seq.stdout }}" - pds_name: "{{ tmp_ds_pds.stdout }}" - - - name: Fact `data_set_name` and `pds_name` set with values - debug: - msg: - - "sequential data set name - {{ data_set_name }}" - - "pds name - {{ pds_name }}" - - ############################################################################ - # Modules zos_copy, zos_data_set, zos_fetch - ############################################################################ - # +------------------------------------------------------------------------- - # | 1. 
Create a sequential data set - # | 2. Create a USS file and populate it with some data - # | 3. Copy the USS file to the sequential data set - # | 4. Create a PDS and a member within the PDS - # | 5. Copy the sequential data set to the PDS member - # | 6. Create a new PDS, replacing the existing one - # | 7. Delete all data sets that were created during playbook execution - # +------------------------------------------------------------------------- - - - name: Create a sequential data set - zos_data_set: - name: "{{ data_set_name }}" - type: seq - state: present - replace: true - format: fb - record_length: 100 - space_primary: 5 - space_type: m - register: result - - - name: Response for data set creation - debug: - msg: "{{ result }}" - - - name: Remove old {{ tgt_tmp_dir }}/HELLO if it exists, for idempotency - file: - path: "{{ tgt_tmp_dir }}/HELLO" - state: absent - register: result - - - name: Response for USS file deletion - debug: - msg: "{{ result }}" - - - name: Copy HELLO.jcl from project to USS file - zos_copy: - src: "{{ playbook_dir }}/files/HELLO.jcl" - dest: "{{ tgt_tmp_dir }}/HELLO" - register: result - - - name: Response for populating USS file {{ tgt_tmp_dir }}/HELLO - on target {{ inventory_hostname }} - debug: - msg: "{{ result }}" - - - name: Copy the USS file to the sequential data set - zos_copy: - src: "{{ tgt_tmp_dir }}/HELLO" - dest: "{{ data_set_name }}" - remote_src: true - register: result - - - name: Response for copying USS file to data set - debug: - msg: "{{ result }}" - - - name: Create a PDS - zos_data_set: - name: "{{ pds_name }}" - type: pds - space_primary: 5 - space_type: m - format: fba - record_length: 100 - register: result - - - name: Response for creating the PDS - debug: - msg: "{{ result }}" - - - name: Remove the target PDS member if it exists, for idempotency. - zos_data_set: - name: "{{ pds_name }}(HELLO)" - type: member - state: absent - register: result - - - name: Response for removing the PDS member - debug: - msg: "{{ result }}" - - - name: Copy the sequential data set to PDS member - zos_copy: - src: "{{ data_set_name }}" - dest: "{{ pds_name }}(HELLO)" - remote_src: true - register: result - - - name: Response for copying {{ data_set_name }} to {{ pds_name }}(HELLO) - debug: - msg: "{{ result }}" - - - name: Delete the data sets - zos_data_set: - batch: - - name: "{{ data_set_name }}" - state: absent - - name: "{{ pds_name }}" - state: absent - register: result - - - name: Response for deleting the data sets - debug: - msg: "{{ result }}" - - - name: Clean up {{ tgt_tmp_dir }} - file: - path: "{{ tgt_tmp_dir }}/HELLO" - state: absent +############################################################################### +# © Copyright IBM Corporation 2020, 2021 +############################################################################### +# This sample playbook demonstrates basic data set operations using modules +# included in the Red Hat Ansible Certified Content for IBM Z core collection. +# +# Usage: +# ansible-playbook -i +# +# Example: +# ansible-playbook -i inventories data_set_basics.yml +# ansible-playbook -i inventories data_set_basics.yml -v +# +# Additional facts for this playbook can be configured to override the defaults +# by reviewing the "Fact setting" section of this playbook, for example, +# `data_set_name` and `system_name`. +# +# Requirements: +# IBM z/OS core collection 1.2.0 or later +# +# Configure: +# tgt_tmp_dir - this is the USS directory on the target which will be written +# to for this example. 
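+#   For instance (illustrative only, any writable USS path will do), the
+#   default can be overridden at run time with an extra var:
+#     ansible-playbook -i inventories data_set_basics.yml -e "tgt_tmp_dir=/u/ibmuser/tmp"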
+#
+# Optional:
+# data_set_name - this is the data set name that will be created during
+# execution of this sample.
+# pds_name - this is the pds name that will be used during execution of this
+# sample.
+# system_name - this is the system name that will be used during this example,
+# determined by executing `uname -n` on the target.
+###############################################################################
+
+---
+- hosts: all
+  collections:
+    - ibm.ibm_zos_core
+  gather_facts: no
+  vars:
+    tgt_tmp_dir: "/tmp"
+  environment: "{{ environment_vars }}"
+
+  tasks:
+    # ##########################################################################
+    # Generate and set temporary names for data sets
+    # ##########################################################################
+    - name: Create temp sequential data set name
+      command: "mvstmp {{ ansible_user | upper }}"
+      register: tmp_ds_seq
+
+    - name: Create temp PDS name
+      command: "mvstmp {{ ansible_user | upper }}"
+      register: tmp_ds_pds
+
+    - name: Set names for sequential data set and pds for use by this sample
+      set_fact:
+        data_set_name: "{{ tmp_ds_seq.stdout }}"
+        pds_name: "{{ tmp_ds_pds.stdout }}"
+
+    - name: Fact `data_set_name` and `pds_name` set with values
+      debug:
+        msg:
+          - "sequential data set name - {{ data_set_name }}"
+          - "pds name - {{ pds_name }}"
+
+    ############################################################################
+    # Modules zos_copy, zos_data_set
+    ############################################################################
+    # +-------------------------------------------------------------------------
+    # | 1. Create a sequential data set
+    # | 2. Create a USS file and populate it with some data
+    # | 3. Copy the USS file to the sequential data set
+    # | 4. Create a PDS and a member within the PDS
+    # | 5. Copy the sequential data set to the PDS member
+    # | 6. Create a new PDS, replacing the existing one
+    # | 7. 
Delete all data sets that were created during playbook execution + # +------------------------------------------------------------------------- + + - name: Create a sequential data set + zos_data_set: + name: "{{ data_set_name }}" + type: seq + state: present + replace: true + format: fb + record_length: 100 + space_primary: 5 + space_type: m + register: result + + - name: Response for data set creation + debug: + msg: "{{ result }}" + + - name: Create a USS file + file: + path: "{{ tgt_tmp_dir }}/HELLO" + state: touch + register: result + + - name: Response for USS file creation + debug: + msg: "{{ result }}" + + - name: zos_copy + zos_copy: + src: "{{ playbook_dir }}/files/HELLO.jcl" + dest: "{{ tgt_tmp_dir }}/HELLO" + # remote_src: yes + force: true + register: result + + - name: Response for populating USS file {{ tgt_tmp_dir }}/HELLO + on target {{ inventory_hostname }} + debug: + msg: "{{ result }}" + + - name: Copy the USS file to the sequential data set + zos_copy: + src: "{{ tgt_tmp_dir }}/HELLO" + dest: "{{ data_set_name }}" + remote_src: yes + register: result + + - name: Response for copying USS file to data set + debug: + msg: "{{ result }}" + + - name: Create a PDS + zos_data_set: + name: "{{ pds_name }}" + type: pds + space_primary: 5 + space_type: m + format: fba + record_length: 100 + register: result + + - name: Response for creating the PDS + debug: + msg: "{{ result }}" + + - name: Create a PDS member + zos_data_set: + name: "{{ pds_name }}(HELLO)" + type: member + register: result + + - name: Response for creating the PDS member + debug: + msg: "{{ result }}" + + - name: Copy the sequential data set to PDS member + zos_copy: + src: "{{ data_set_name }}" + dest: "{{ pds_name }}(HELLO)" + remote_src: yes + force: true + register: result + + - name: Response for copying {{ data_set_name }} to {{ pds_name }}(HELLO) + debug: + msg: "{{ result }}" + + - name: Delete the data sets + zos_data_set: + batch: + - name: "{{ data_set_name }}" + state: absent + - name: "{{ pds_name }}" + state: absent + register: result + + - name: Response for deleting the data sets + debug: + msg: "{{ result }}" + + - name: Clean up {{ tgt_tmp_dir }} + file: + path: "{{ tgt_tmp_dir }}/HELLO" + state: absent diff --git a/zos_concepts/jobs/submit_query_retrieve/submit_query_retrieve.yml b/zos_concepts/jobs/submit_query_retrieve/submit_query_retrieve.yml old mode 100644 new mode 100755 index a1c5c46c..18c5a7b1 --- a/zos_concepts/jobs/submit_query_retrieve/submit_query_retrieve.yml +++ b/zos_concepts/jobs/submit_query_retrieve/submit_query_retrieve.yml @@ -1,251 +1,276 @@ -############################################################################### -# © Copyright IBM Corporation 2020, 2024 -############################################################################### - -############################################################################### -# This sample playbook demonstrates how to submit jobs, query and retrieve job -# output using modules included in the Red Hat Ansible Certified Content for -# IBM Z core collection. -# -# Usage: -# ansible-playbook -i -# -# Example: -# ansible-playbook -i inventories submit_query_retrieve.yaml -# -# Additional facts for this playbook can be configured to override the defaults -# by reviewing the "Fact setting" section of this playbook, for example, -# `data_set_name`. -# -# Requirements: -# IBM z/OS core collection 1.1.0 or later # FIXME - update to correct version. 
-# -# Configure: -# tgt_tmp_dir - this is the USS directory on the target which will be written -# to for this example. -# Optional: -# data_set_name - this is the data set name that will be created during -# execution of this sample. -# job_name - this is the job name what will be used in this sample, if you -# change the HELLO.JCL job name, you must update this variable -############################################################################### - ---- -- hosts: zos_host - collections: - - ibm.ibm_zos_core - gather_facts: false - vars: - tgt_tmp_dir: "/tmp" - job_name: "HELLO" - environment: "{{ environment_vars }}" - - tasks: - # ########################################################################## - # Generate a temporary data set name - # ########################################################################## - - name: Create temp data set name - command: "mvstmp {{ ansible_user | upper }}" - register: tmp_ds - - # ########################################################################## - # Fact setting for use by this playbook - # ########################################################################## - - name: Setting fact `data_set_name` for use by this sample - set_fact: - data_set_name: "{{ tmp_ds.stdout }}" - - - name: Fact `data_set_name` set with value - debug: - msg: "{{ data_set_name }}" - - ############################################################################ - # Modules zos_data_set, zos_job_submit, zos_job_query, zos_job_output, - # zos_data_set, zos_copy. - ############################################################################ - # +------------------------------------------------------------------------- - # | Create a data set and member, create JCL on USS target, copy USS JCL to - # | data set, submit JCL in data set, query job, get job output. Repeat the - # | process with JCL submitted from USS file. - # +------------------------------------------------------------------------- - ############################################################################ - - - name: Create a PDS data set {{ data_set_name }} - zos_data_set: - name: "{{ data_set_name }}" - type: pds - space_primary: 5 - space_type: m - format: fb - record_length: 80 - replace: true - register: result - - - name: Response for create a PDS data set {{ data_set_name }} - debug: - msg: "{{ result }}" - - - name: Ensure JCL folder exists in USS to manage JCL - file: - path: "{{ tgt_tmp_dir }}/ansible/jcl" - state: directory - - - name: Response for ensure JCL folder exists in USS to manage JCL - debug: - msg: "{{ result }}" - - - name: Write HELLO JCL to USS in {{ tgt_tmp_dir }}/ansible/jcl/HELLO" - on target {{ inventory_hostname }} - zos_copy: - src: "{{ playbook_dir }}/files/HELLO.jcl" - dest: "{{ tgt_tmp_dir }}/ansible/jcl/HELLO" - register: result - - - name: Response for write HELLO JCL to USS - in {{ tgt_tmp_dir }}/ansible/jcl/HELLO" on - target {{ inventory_hostname }} - debug: - msg: "{{ result }}" - - - name: Populate {{ data_set_name }} member with data from USS file - in {{ tgt_tmp_dir }}/ansible/jcl - zos_copy: - src: "{{ tgt_tmp_dir }}/ansible/jcl/HELLO" - remote_src: true - dest: "{{ data_set_name }}(MEM1)" - register: result - - - name: Response for populate {{ data_set_name }} member with data from - USS file in {{ tgt_tmp_dir }}/ansible/jcl - debug: - msg: "{{ result }}" - - # +------------------------------------------------------------------------- - # | "with_sequence" is a type of conditional Ansible loop. 
The loop here - # | only runs for a single iteration and is included solely to demonstrate - # | how multiple jobs could be submitted under a single task for members of - # | a single PDS. - # +------------------------------------------------------------------------- - - - name: Submit the JCL {{ data_set_name }}(MEM1) - zos_job_submit: - src: "{{ data_set_name }}(MEM{{ item }})" - location: data_set - register: result - with_sequence: count=1 - - - name: Response for submit the JCL {{ data_set_name }}.(MEM1) - debug: - msg: "{{ result }}" - - # +------------------------------------------------------------------------- - # | There is a list of results returned by the zos_job_submit module. Each - # | result contains attributes from the job it refers to including job_id, - # | job_name, owner, content, and return codes. Since only one job was - # | submitted, it will be the first (and only) job in the results list. - # +------------------------------------------------------------------------- - - - name: Setting fact `job_id_pds` for id of job submitted above - set_fact: - job_id_pds: "{{ result.results[0].jobs[0].job_id }}" - - name: Fact `job_id_pds` set with value - debug: - msg: "{{ job_id_pds }}" - - - name: Query submitted job by job_id - zos_job_query: - job_id: "{{ job_id_pds }}" - register: result - - - name: Response for query submitted job by job_id - debug: - msg: "{{ result }}" - - - name: Get job output for job {{ job_id_pds }} from PDS member - zos_job_output: - job_id: "{{ job_id_pds }}" - register: result - - - name: Response for get job output for job "{{ job_id_pds }}" from PDS - member - debug: - msg: "{{ result }}" - - # +------------------------------------------------------------------------- - # | JCL from a USS file can also be submitted as a job by leveraging the - # | zos_job_submit module. - # +------------------------------------------------------------------------- - - - name: Submit {{ job_name }} JCL located on target - in {{ tgt_tmp_dir }}/ansible/jcl/{{ job_name }} - zos_job_submit: - src: "{{ tgt_tmp_dir }}/ansible/jcl/{{ job_name }}" - location: uss - register: result - - - name: Response for submit {{ job_name }} JCL located on target - in {{ tgt_tmp_dir }}/ansible/jcl/{{ job_name }} - debug: - msg: - - "{{ result }}" - - - name: Setting fact `job_id_uss` for id of job submitted above - set_fact: - job_id_uss: "{{ result.jobs[0].job_id }}" - - - name: Fact `job_id_uss` set with value - debug: - msg: "{{ job_id_uss }}" - - - name: Query the submitted job {{ job_name }} on USS target - zos_job_query: - job_id: "{{ job_id_uss }}" - register: result - - - name: Response for Query the submitted job {{ job_name }} - on USS target - debug: - msg: "{{ result }}" - - # +------------------------------------------------------------------------- - # | The following section is commented out because it creates identical - # | output to when the same job was submited earlier in the playbook - # | from the PDS member. Feel free to uncomment the following section to - # | view the full job output. 
- # +------------------------------------------------------------------------- - - # - name: Get job output for job {{ job_id_uss }} on USS target - # zos_job_output: - # job_id: "{{ job_id_uss }}" - # register: result - - # - name: Response for Get {{ job_name }} job output on USS target - # debug: - # msg: "{{ result }}" - - ############################################################################ - # +------------------------------------------------------------------------- - # | Clean up - remove JCL files from USS, delete PDS. - # +------------------------------------------------------------------------- - ############################################################################ - - - name: Remove {{ job_name }} JCL and folder on USS target - file: - path: "{{ tgt_tmp_dir }}/ansible" - state: absent - register: result - - - name: Response for remove {{ job_name }} JCL and folder on - USS target - debug: - msg: "{{ result }}" - - - name: Delete data set {{ data_set_name }} - zos_data_set: - name: "{{ data_set_name }}" - state: absent - register: result - - - name: Response for delete data set {{ data_set_name }} - debug: - msg: "{{ result }}" +############################################################################### +# © Copyright IBM Corporation 2020, 2021, 2022 +############################################################################### + +############################################################################### +# This sample playbook demonstrates how to submit jobs, query and retrieve job +# output using modules included in the Red Hat Ansible Certified Content for +# IBM Z core collection. +# +# Usage: +# ansible-playbook -i +# +# Example: +# ansible-playbook -i inventories submit_query_retrieve.yaml +# +# Additional facts for this playbook can be configured to override the defaults +# by reviewing the "Fact setting" section of this playbook, for example, +# `data_set_name`. +# +# Requirements: +# IBM z/OS core collection 1.3.0 or later. +# +# Configure: +# tgt_tmp_dir - this is the USS directory on the target which will be written +# to for this example. +# Optional: +# data_set_name - this is the data set name that will be created during +# execution of this sample. +# job_name - this is the job name what will be used in this sample, if you +# change the HELLO.JCL job name, you must update this variable +############################################################################### + +--- +- hosts: zos_host + collections: + - ibm.ibm_zos_core + gather_facts: no + vars: + tgt_tmp_dir: "/tmp" + job_name: "HELLO" + environment: "{{ environment_vars }}" + + tasks: + # ########################################################################## + # Generate a temporary data set name + # ########################################################################## + - name: Create temp data set name + command: "mvstmp {{ ansible_user | upper }}" + register: tmp_ds + + # ########################################################################## + # Fact setting for use by this playbook + # ########################################################################## + - name: Setting fact `data_set_name` for use by this sample + set_fact: + data_set_name: "{{ tmp_ds.stdout }}" + + - name: Fact `data_set_name` set with value + debug: + msg: "{{ data_set_name }}" + + ############################################################################ + # Modules zos_data_set, zos_job_submit, zos_job_query, zos_job_output, + # zos_data_set, zos_copy. 
+    ############################################################################
+    # +-------------------------------------------------------------------------
+    # | Create a data set and member, create JCL on USS target, copy USS JCL to
+    # | data set, submit JCL in data set, query job, get job output. Repeat the
+    # | process with JCL submitted from USS file.
+    # +-------------------------------------------------------------------------
+    ############################################################################
+
+    - name: Create a PDS data set {{ data_set_name }}
+      zos_data_set:
+        name: "{{ data_set_name }}"
+        type: pds
+        space_primary: 5
+        space_type: m
+        format: fb
+        record_length: 80
+        replace: yes
+      register: result
+
+    - name: Response for create a PDS data set {{ data_set_name }}
+      debug:
+        msg: "{{ result }}"
+
+    # +-------------------------------------------------------------------------
+    # | "with_sequence" is a type of conditional Ansible loop. The loop here
+    # | only runs for a single iteration and is included solely to demonstrate
+    # | how easy it is to perform the operation for multiple members. The result
+    # | is that single member 'MEM1' is created.
+    # +-------------------------------------------------------------------------
+
+    - name: Create a PDS member and replace if member exists
+      zos_data_set:
+        name: "{{ data_set_name }}(MEM{{ item }})"
+        type: member
+        replace: yes
+      with_sequence: count=1
+      register: result
+
+    - name: Response for create a PDS member and replace if member exists
+      debug:
+        msg: "{{ result }}"
+
+    - name: Ensure JCL folder exists in USS to manage JCL
+      file:
+        path: "{{ tgt_tmp_dir }}/ansible/jcl"
+        state: directory
+
+    - name: Response for ensure JCL folder exists in USS to manage JCL
+      debug:
+        msg: "{{ result }}"
+
+    - name: Write HELLO JCL to USS in {{ tgt_tmp_dir }}/ansible/jcl/HELLO
+        on target {{ inventory_hostname }}
+      zos_copy:
+        src: "{{ playbook_dir }}/files/HELLO.jcl"
+        dest: "{{ tgt_tmp_dir }}/ansible/jcl/HELLO"
+        force: true
+      register: result
+
+    - name: Response for write HELLO JCL to USS
+        in {{ tgt_tmp_dir }}/ansible/jcl/HELLO on
+        target {{ inventory_hostname }}
+      debug:
+        msg: "{{ result }}"
+
+    - name: Populate {{ data_set_name }} member with data from USS file
+        in {{ tgt_tmp_dir }}/ansible/jcl
+      zos_copy:
+        src: "{{ tgt_tmp_dir }}/ansible/jcl/HELLO"
+        remote_src: True
+        dest: "{{ data_set_name }}(MEM1)"
+        force: true
+      register: result
+
+    - name: Response for populate {{ data_set_name }} member with data from
+        USS file in {{ tgt_tmp_dir }}/ansible/jcl
+      debug:
+        msg: "{{ result }}"
+
+    # +-------------------------------------------------------------------------
+    # | Similar to how the PDS member was created earlier, this is another
+    # | Ansible loop with a single iteration designed to showcase how multiple
+    # | jobs could be submitted under a single task for members of a single PDS.
+    # +-------------------------------------------------------------------------
+
+    - name: Submit the JCL {{ data_set_name }}(MEM1)
+      zos_job_submit:
+        src: "{{ data_set_name }}(MEM{{ item }})"
+        location: data_set
+        wait_time_s: 155
+      register: result
+      with_sequence: count=1
+
+    - name: Response for submit the JCL {{ data_set_name }}(MEM1)
+      debug:
+        msg: "{{ result }}"
+
+    # +-------------------------------------------------------------------------
+    # | There is a list of results returned by the zos_job_submit module. Each
+    # | result contains attributes from the job it refers to including job_id,
+    # | job_name, owner, content, and return codes. Since only one job was
+    # | submitted, it will be the first (and only) job in the results list.
+    # +-------------------------------------------------------------------------
+
+    - name: Setting fact `job_id_pds` for id of job submitted above
+      set_fact:
+        job_id_pds: "{{ result.results[0].jobs[0].job_id }}"
+
+    - name: Fact `job_id_pds` set with value
+      debug:
+        msg: "{{ job_id_pds }}"
+
+    - name: Query submitted job by job_id
+      zos_job_query:
+        job_id: "{{ job_id_pds }}"
+      register: result
+
+    - name: Response for query submitted job by job_id
+      debug:
+        msg: "{{ result }}"
+
+    - name: Get job output for job {{ job_id_pds }} from PDS member
+      zos_job_output:
+        job_id: "{{ job_id_pds }}"
+      register: result
+
+    - name: Response for get job output for job "{{ job_id_pds }}" from PDS
+        member
+      debug:
+        msg: "{{ result }}"
+
+    # +-------------------------------------------------------------------------
+    # | JCL from a USS file can also be submitted as a job by leveraging the
+    # | zos_job_submit module.
+    # +-------------------------------------------------------------------------
+
+    - name: Submit {{ job_name }} JCL located on target
+        in {{ tgt_tmp_dir }}/ansible/jcl/{{ job_name }}
+      zos_job_submit:
+        src: "{{ tgt_tmp_dir }}/ansible/jcl/{{ job_name }}"
+        location: uss
+        wait_time_s: 40
+      register: result
+
+    - name: Response for submit {{ job_name }} JCL located on target
+        in {{ tgt_tmp_dir }}/ansible/jcl/{{ job_name }}
+      debug:
+        msg:
+          - "{{ result }}"
+
+    - name: Setting fact `job_id_uss` for id of job submitted above
+      set_fact:
+        job_id_uss: "{{ result.jobs[0].job_id }}"
+
+    # job_id_uss: "{{ result.job_id }}"
+
+    - name: Fact `job_id_uss` set with value
+      debug:
+        msg: "{{ job_id_uss }}"
+
+    - name: Query the submitted job {{ job_name }} on USS target
+      zos_job_query:
+        job_id: "{{ job_id_uss }}"
+      register: result
+
+    - name: Response for Query the submitted job {{ job_name }}
+        on USS target
+      debug:
+        msg: "{{ result }}"
+
+    # +-------------------------------------------------------------------------
+    # | The following section is commented out because it creates identical
+    # | output to when the same job was submitted earlier in the playbook
+    # | from the PDS member. Feel free to uncomment the following section to
+    # | view the full job output.
+    # +-------------------------------------------------------------------------
+
+    # - name: Get job output for job {{ job_id_uss }} on USS target
+    #   zos_job_output:
+    #     job_id: "{{ job_id_uss }}"
+    #   register: result
+
+    # - name: Response for Get {{ job_name }} job output on USS target
+    #   debug:
+    #     msg: "{{ result }}"
+
+    ############################################################################
+    # +-------------------------------------------------------------------------
+    # | Clean up - remove JCL files from USS, delete PDS.
+ # +------------------------------------------------------------------------- + ############################################################################ + + - name: Remove {{ job_name }} JCL and folder on USS target + file: + path: "{{ tgt_tmp_dir }}/ansible" + state: absent + register: result + + - name: Response for remove {{ job_name }} JCL and folder on + USS target + debug: + msg: "{{ result }}" + + - name: Delete data set {{ data_set_name }} + zos_data_set: + name: "{{ data_set_name }}" + state: absent + register: result + + - name: Response for delete data set {{ data_set_name }} + debug: + msg: "{{ result }}" diff --git a/zos_concepts/zos_script/zos_script.yml b/zos_concepts/zos_script/zos_script.yml index e0acbefe..cc464508 100644 --- a/zos_concepts/zos_script/zos_script.yml +++ b/zos_concepts/zos_script/zos_script.yml @@ -11,18 +11,22 @@ # # Example: # ansible-playbook -i inventories zos_script.yml +# ansible-playbook -i inventories zos_script.yml --tags rexx_script # # Requirements: # IBM z/OS core collection 1.8.0 or later. # # Configure: # python_script_dir - Directory where the Python script will be run (mostly -# to test the chdir option in zos_script). -# show_current_time - Whether to print the current time on the managed node -# when running the CATALOG script. -# show_header - Whether to print a header when running the CATALOG script. -# rexx_header - Content of the header that will be used in the CATALOG -# script. +# to test the chdir module option). +# Optional: +# use_custom_msg - Whether to print the custom message or use the default one +# when running the templated shell script. +# custom_msg - Content of the message printed in the templated shell script. +# some_num - A number used as a range to loop over in the templated shell +# script. +# fav_programming_languages - A list of strs which are sorted and then printed +# out in the templated shell script. ############################################################################### - name: Sample zos_script playbook. @@ -33,51 +37,116 @@ environment: '{{ environment_vars }}' vars: - python_script_dir: "/u/user_name" + python_script_dir: "/u/ibmuser" - show_current_time: true - show_header: true - rexx_header: "#--- Current catalog information ---#" + use_custom_msg: True + custom_msg: "This is a custom Hello World message!!" + some_num: 3 + fav_programming_languages: + - C + - rexx + - python + - cobol tasks: - # For this task, we can give arguments directly to the script by writing - # them after the command that we're interested in running on the managed - # node. Make sure to replace the values of the arguments. - - name: Run REXX script to get a job's information. + + ############################################################################## + # This first block shows how a value can be passed directly to a script as a + # command line arg by using the 'cmd' module option. + # Notice in the output that the value of 'ansible_user' is present. + ############################################################################## + + - name: Pass a value to a script in-line as a command line arg with the 'cmd' + module option and print the output. + tags: rexx_script + block: + + - name: Run a Rexx script which prints a greeting to the user and prints the + current working directory on the remote system. 
ibm.ibm_zos_core.zos_script: - cmd: "{{ playbook_dir }}/files/JOB_INFO JOBID=ID OWNER=OWNER JOBNAME=NAME" + cmd: "{{ playbook_dir }}/files/HELLO_USER_PRINT_CWD.rexx name={{ ansible_user }}" remote_src: false - register: job_output + register: hello_user_output - - name: See Job information. + - name: Print the Rexx script output. ansible.builtin.debug: - msg: "{{ job_output }}" + msg: "{{ hello_user_output.stdout_lines }}" + + + ############################################################################## + # This next block explores module options: 'executable' and 'chdir'. + # The default behavior of the zos_script module is to run a script as Rexx. + # The task also passes in a command line arg like the previous one did. + # The 'executable' module option enables running other types of scripts by + # passing in a path to the appropriate executable. The Ansible inventory + # variable 'ansible_python_interpreter' is typically used to specify the + # remote path to the python interpreter used by Ansible to run modules. The + # task below reuses this variable in the 'executable' module option since a + # working version of python is guaranteed to be at that location (given that + # the rest of this playbook ran). + # The value set in the module option 'chdir' is set to the value defined above + # in the vars section of this playbook. + # Notice in the output that the value of 'current working directory' matches + # what was specified in module option 'chdir'. Compare this to the output from + # the previous block where the 'chdir' module option was not specified. + ############################################################################## - # For this task, we're trying out 'executable' and 'chdir' to have more - # control over the way a script is run. - - name: Run Python script in a given directory. + - name: Specify a remote directory to run a script in and specify which + program to use to run the script, then print the output. + tags: python_script + block: + + - name: Run a Python script, with a specified python executable, in a + specified remote directory, which prints a greeting to the user and + prints the current working directory on the remote system. ibm.ibm_zos_core.zos_script: - cmd: "{{ playbook_dir }}/files/list_dir.py" + cmd: "{{ playbook_dir }}/files/hello_user_print_cwd.py --name={{ ansible_user }}" chdir: "{{ python_script_dir }}" executable: "{{ ansible_python_interpreter }}" remote_src: false register: python_output - - name: See Python output. + - name: Print the Python script output. ansible.builtin.debug: - msg: "{{ python_output }}" + msg: "{{ python_output.stdout_lines }}" + + + ############################################################################## + # This last block runs a templated shell script. + # First, the script greets the user, prints the directory the playbook was run + # from and prints a number all for which the values are rendered in jinja and + # substitued in. The user and playbook directory values are populated by + # accessing the Ansible magic variables 'ansible_user' and 'playbook_dir'. The + # number is defined above as 'some_num' in the vars section of this playbook. + # Next the script assigns a string value to a variable '$msg' based on a + # jinja-templated conditional block. The value of the conditional is based on + # the boolean value 'use_custom_msg' defined above in the vars section of this + # playbook. 
+ # Then the script uses a jinja-templated loop to iterate over a range from 1 + # up to and including the number defined in 'some_num'. Each iteration prints + # out the value of the loop variable as well as the determined value of + # '$msg'. + # Lastly, the script uses another jinja-templated loop to iterate through the + # 'fav_programming_languages' list, sorts it in reverse alphabetical order, + # and prints the sorted list. + ############################################################################## + + - name: Run a templated shell script which leverages jinja templating to + substitute values in a string, to set up and evaluate a conditional + block, and to loop through both a range and a list. + tags: shell_script + block: - # For the last task, we're trying out a template of a REXX script. See - # the variables defined above. - - name: Run template of a REXX script. + - name: Run a templated shell script. ibm.ibm_zos_core.zos_script: - cmd: "{{ playbook_dir }}/files/CATALOG" + cmd: "{{ playbook_dir }}/files/templated_loops_and_conditional_sample.sh" + executable: /bin/sh remote_src: false use_template: true template_parameters: keep_trailing_newline: true - register: template_output + register: output - - name: See script's output. + - name: Print shell script output. ansible.builtin.debug: - msg: "{{ template_output }}" + msg: "{{ output.stdout_lines }}"
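
Note: the templated script referenced above, files/templated_loops_and_conditional_sample.sh, is not part of this patch, so its contents are not shown here. As an aid to review, a minimal sketch of what such a script might look like, assuming the playbook vars shown above (use_custom_msg, custom_msg, some_num, fav_programming_languages), the Ansible magic variables ansible_user and playbook_dir, and default Jinja2-style delimiters for zos_script templating, could be:

    #!/bin/sh
    # Hypothetical sketch only; the real file ships with the playbook and may differ.
    # Greeting, playbook directory, and a number, all rendered by Jinja2 before transfer.
    echo "Hello {{ ansible_user }}, this playbook was run from {{ playbook_dir }}."
    echo "some_num is {{ some_num }}"

    # Assign $msg from a templated conditional on 'use_custom_msg'.
    {% if use_custom_msg %}
    msg="{{ custom_msg }}"
    {% else %}
    msg="Hello World!"
    {% endif %}

    # Loop from 1 up to and including 'some_num', printing the loop variable and $msg.
    {% for i in range(1, some_num + 1) %}
    echo "iteration {{ i }}: $msg"
    {% endfor %}

    # Sort the favorite languages in reverse alphabetical order and print them.
    {% for lang in fav_programming_languages | sort(reverse=true) %}
    echo "{{ lang }}"
    {% endfor %}

Only the shell lines that survive Jinja2 rendering reach the managed node, which is why the playbook can drive the script's behavior entirely from its vars section.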