From c8aef1958295c7312382f05685541b26c8f08616 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Mon, 26 Aug 2024 12:07:33 -0600 Subject: [PATCH 01/26] First iteration --- .../modules/test_zos_blockinfile_func.py | 164 ++++++++---------- 1 file changed, 69 insertions(+), 95 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 84d0850da..f01efeaa9 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -18,10 +18,10 @@ from shellescape import quote import pytest from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.utils import get_random_file_name __metaclass__ = type -TEST_FOLDER_BLOCKINFILE = "/tmp/ansible-core-tests/zos_blockinfile/" c_pgm="""#include <stdio.h> #include <stdlib.h> @@ -434,18 +434,18 @@ # not supported data set types NS_DS_TYPE = ['esds', 'rrds', 'lds'] -USS_BACKUP_FILE = "/tmp/backup.tmp" -BACKUP_OPTIONS = [None, "BLOCKIF.TEST.BACKUP", "BLOCKIF.TEST.BACKUP(BACKUP)"] +TMP_DIRECTORY = "/tmp/" + +BACKUP_OPTIONS = [None, "SEQ", "MEM"] def set_uss_environment(ansible_zos_module, content, file): hosts = ansible_zos_module - hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_BLOCKINFILE}") hosts.all.file(path=file, state="touch") hosts.all.shell(cmd=f"echo \"{content}\" > {file}") -def remove_uss_environment(ansible_zos_module): +def remove_uss_environment(ansible_zos_module, file): hosts = ansible_zos_module - hosts.all.shell(cmd="rm -rf" + TEST_FOLDER_BLOCKINFILE) + hosts.all.shell(cmd="rm " + file) def set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content): hosts = ansible_zos_module @@ -479,7 +479,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -491,7 +491,7 @@ def test_uss_block_insertafter_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -502,7 +502,7 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -514,7 +514,7 @@ def test_uss_block_insertbefore_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -525,7 +525,7 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, 
content, full_path) @@ -537,7 +537,7 @@ def test_uss_block_insertafter_eof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -548,7 +548,7 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): "block":"# this is file is for setting env vars", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -560,7 +560,7 @@ def test_uss_block_insertbefore_bof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -575,7 +575,7 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -587,7 +587,7 @@ def test_uss_block_insertafter_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @@ -603,7 +603,7 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -615,7 +615,7 @@ def test_uss_block_insertbefore_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -630,7 +630,7 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -642,7 +642,7 @@ def test_uss_block_insertafter_eof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -657,7 +657,7 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: 
set_uss_environment(ansible_zos_module, content, full_path) @@ -669,7 +669,7 @@ def test_uss_block_insertbefore_bof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -679,7 +679,7 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): "block":"", "state":"absent" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -691,7 +691,7 @@ def test_uss_block_absent_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -704,7 +704,7 @@ def test_uss_block_absent_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -716,7 +716,7 @@ def test_uss_block_absent_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ABSENT finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -727,7 +727,7 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): "block":"ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -739,7 +739,7 @@ def test_uss_block_replace_insertafter_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTAFTER finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -750,7 +750,7 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): "block":"unset ZOAU_ROOT\nunset ZOAU_HOME\nunset ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -762,7 +762,7 @@ def test_uss_block_replace_insertbefore_regex_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_INSERTBEFORE finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -773,7 +773,7 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: 
set_uss_environment(ansible_zos_module, content, full_path) @@ -785,7 +785,7 @@ def test_uss_block_replace_insertafter_eof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -796,7 +796,7 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): "block":"# this is file is for setting env vars", "state":"present" } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -808,7 +808,7 @@ def test_uss_block_replace_insertbefore_bof_defaultmarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -822,7 +822,7 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -834,7 +834,7 @@ def test_uss_block_replace_insertafter_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_EOF_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -848,7 +848,7 @@ def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -860,7 +860,7 @@ def test_uss_block_replace_insertbefore_regex_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_REPLACE_BOF_REGEX_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -874,7 +874,7 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -886,7 +886,7 @@ def test_uss_block_replace_insertafter_eof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -900,7 +900,7 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): params["marker"] = '# {mark} IBM MANAGED BLOCK' params["marker_begin"] = 'OPEN' 
params["marker_end"] = 'CLOSE' - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_CUSTOMMARKER try: set_uss_environment(ansible_zos_module, content, full_path) @@ -912,7 +912,7 @@ def test_uss_block_replace_insertbefore_bof_custommarker(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTBEFORE_BOF_CUSTOM finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -924,7 +924,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): "state":"present", "indentation":16 } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -936,7 +936,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERT_WITH_INDENTATION finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) # Test case base on bug of dataset.blockifile # GH Issue #1258 @@ -944,7 +944,7 @@ def test_uss_block_insert_with_indentation_level_specified(ansible_zos_module): def test_uss_block_insert_with_doublequotes(ansible_zos_module): hosts = ansible_zos_module params = dict(insertafter="sleep 30;", block='cat "//OMVSADMI.CAT"\ncat "//OMVSADM.COPYMEM.TESTS" > test.txt', marker="// {mark} ANSIBLE MANAGED BLOCK", state="present") - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DOUBLEQUOTES try: set_uss_environment(ansible_zos_module, content, full_path) @@ -957,7 +957,7 @@ def test_uss_block_insert_with_doublequotes(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_DOUBLE_QUOTES finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss @@ -969,7 +969,7 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): "state":"present", "backup":True } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -984,20 +984,21 @@ def test_uss_block_insertafter_eof_with_backup(ansible_zos_module): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: ansible_zos_module.all.file(path=backup_name, state="absent") - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) @pytest.mark.uss def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): hosts = ansible_zos_module + uss_backup_file = get_random_file_name(dir=TMP_DIRECTORY, suffix=".tmp") params = { "insertafter":"EOF", "block":"export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", "state":"present", "backup":True, - "backup_name":USS_BACKUP_FILE + "backup_name":uss_backup_file } - full_path = TEST_FOLDER_BLOCKINFILE + inspect.stack()[0][3] + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: set_uss_environment(ansible_zos_module, content, full_path) @@ -1005,8 +1006,8 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): results = 
hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - assert result.get("backup_name") == USS_BACKUP_FILE - cmd_str = f"cat {USS_BACKUP_FILE}" + assert result.get("backup_name") == uss_backup_file + cmd_str = f"cat {uss_backup_file}" results = ansible_zos_module.all.shell(cmd=cmd_str) for result in results.contacted.values(): assert result.get("stdout") == TEST_CONTENT @@ -1014,8 +1015,8 @@ def test_uss_block_insertafter_eof_with_backup_name(ansible_zos_module): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: - ansible_zos_module.all.file(path=USS_BACKUP_FILE, state="absent") - remove_uss_environment(ansible_zos_module) + ansible_zos_module.all.file(path=uss_backup_file, state="absent") + remove_uss_environment(ansible_zos_module, full_path) ######################### @@ -1332,7 +1333,10 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup "backup":True } if backup_name: - params["backup_name"] = backup_name + if backup_name == "SEQ": + params["backup_name"] = get_tmp_ds_name() + else: + params["backup_name"] = get_tmp_ds_name() + "(MEM)" ds_name = get_tmp_ds_name() temp_file = "/tmp/" + ds_name content = TEST_CONTENT @@ -1349,8 +1353,6 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup assert result.get("stdout") == EXPECTED_INSERTAFTER_EOF finally: remove_ds_environment(ansible_zos_module, ds_name) - if backup_name: - ansible_zos_module.all.zos_data_set(name="BLOCKIF.TEST.BACKUP", state="absent") if backup_ds_name != "": ansible_zos_module.all.zos_data_set(name=backup_ds_name, state="absent") @@ -1523,10 +1525,10 @@ def test_uss_encoding(ansible_zos_module, encoding): "state":"present" } params["encoding"] = encoding - full_path = TEST_FOLDER_BLOCKINFILE + encoding + full_path = get_random_file_name(dir=TMP_DIRECTORY) + encoding content = "SIMPLE LINE TO VERIFY" + ds_name = get_tmp_ds_name() try: - hosts.all.shell(cmd=f"mkdir -p {TEST_FOLDER_BLOCKINFILE}") hosts.all.file(path=full_path, state="touch") hosts.all.shell(cmd=f"echo \"{content}\" > {full_path}") hosts.all.zos_encode( @@ -1573,46 +1575,18 @@ @pytest.mark.ds def test_special_characters_ds_insert_block(ansible_zos_module): hosts = ansible_zos_module - ds_type = dstype - insert_data = "Insert this string" - params = { - "insertafter":"SIMPLE", - "block":insert_data, - "state":"present" - } - params["encoding"] = encoding - ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name - content = "SIMPLE LINE TO VERIFY" + params = dict(insertafter="eof", block="ZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=\\$ZOAU_ROOT\nZOAU_DIR=\\$ZOAU_ROOT", state="present") + ds_name = get_tmp_ds_name(5, 5, symbols=True) + backup = get_tmp_ds_name(6, 6, symbols=True) try: - hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") - hosts.all.zos_encode( - src=temp_file, - dest=temp_file, - from_encoding="IBM-1047", - to_encoding=params["encoding"] - ) - hosts.all.zos_data_set(name=ds_name, type=ds_type) - if ds_type in ["pds", "pdse"]: - ds_full_name = ds_name + "(MEM)" - hosts.all.zos_data_set(name=ds_full_name, state="present", type="member") - cmd_str = f"cp -CM {quote(temp_file)} \"//'{ds_full_name}'\"" - else: - ds_full_name = ds_name - cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " - hosts.all.shell(cmd=cmd_str) - hosts.all.shell(cmd="rm -rf " + temp_file) - params["path"] = ds_full_name + result = 
hosts.all.zos_data_set(name=ds_name, type="seq", state="present") + + params["src"] = ds_name results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - hosts.all.zos_encode( - src=ds_full_name, - dest=ds_full_name, - from_encoding=params["encoding"], - to_encoding="IBM-1047" - ) - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["path"])) + src = ds_name.replace('$', "\$") + results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(src)) for result in results.contacted.values(): assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" From 0ecca1816a0abb4ad20d4a19cf361e28acb667a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Wed, 28 Aug 2024 13:04:15 -0600 Subject: [PATCH 02/26] Fix blockinfile test --- .../modules/test_zos_blockinfile_func.py | 49 +++---------------- 1 file changed, 8 insertions(+), 41 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index f01efeaa9..a21c74ce9 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1519,57 +1519,24 @@ def test_special_characters_ds_insert_block(ansible_zos_module): def test_uss_encoding(ansible_zos_module, encoding): hosts = ansible_zos_module insert_data = "Insert this string" - params = { - "insertafter":"SIMPLE", - "block":insert_data, - "state":"present" - } + params = dict(insertafter="SIMPLE", block=insert_data, state="present") params["encoding"] = encoding - full_path = get_random_file_name(dir=TMP_DIRECTORY) + encoding + full_path = get_random_file_name(dir=TMP_DIRECTORY) content = "SIMPLE LINE TO VERIFY" - ds_name = get_tmp_ds_name() try: hosts.all.file(path=full_path, state="touch") - hosts.all.shell(cmd=f"echo \"{content}\" > {full_path}") - hosts.all.zos_encode( - src=full_path, - dest=full_path, - from_encoding="IBM-1047", - to_encoding=params["encoding"] - ) + hosts.all.shell(cmd="echo \"{0}\" > {1}".format(content, full_path)) + hosts.all.zos_encode(src=full_path, dest=full_path, from_encoding="IBM-1047", to_encoding=params["encoding"]) params["path"] = full_path results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) - for result in results.contacted.values(): - assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" - - params["src"] = ds_name + "(-1)" - results = hosts.all.zos_blockinfile(**params) - for result in results.contacted.values(): - assert result.get("changed") == 1 - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(params["src"])) - for result in results.contacted.values(): - assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" - - params_w_bck = dict(insertafter="eof", block="export ZOAU_ROOT\nexport ZOAU_HOME\nexport ZOAU_DIR", state="present", backup=True, backup_name=ds_name + "(+1)") - params_w_bck["src"] = ds_name + "(-1)" - results = hosts.all.zos_blockinfile(**params_w_bck) - for result in results.contacted.values(): - assert 
result.get("changed") == 1 - assert result.get("rc") == 0 - backup = ds_name + "(0)" - results = hosts.all.shell(cmd="cat \"//'{0}'\" ".format(backup)) - for result in results.contacted.values(): - assert result.get("stdout") == "# BEGIN ANSIBLE MANAGED BLOCK\nZOAU_ROOT=/mvsutil-develop_dsed\nZOAU_HOME=$ZOAU_ROOT\nZOAU_DIR=$ZOAU_ROOT\n# END ANSIBLE MANAGED BLOCK" - - params["src"] = ds_name + "(-3)" - results = hosts.all.zos_blockinfile(**params) + results = hosts.all.shell(cmd="cat {0}".format(params["path"])) for result in results.contacted.values(): - assert result.get("changed") == 0 + assert result.get("stdout") == EXPECTED_ENCODING finally: - hosts.all.shell(cmd="""drm "ANSIBLE.*" """) + remove_uss_environment(ansible_zos_module) + @pytest.mark.ds From 412bda505eee70967ae84b00f608e809235e4ac2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Wed, 28 Aug 2024 14:18:18 -0600 Subject: [PATCH 03/26] Fix remove --- tests/functional/modules/test_zos_blockinfile_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index a21c74ce9..66ad95a79 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1535,7 +1535,7 @@ def test_uss_encoding(ansible_zos_module, encoding): for result in results.contacted.values(): assert result.get("stdout") == EXPECTED_ENCODING finally: - remove_uss_environment(ansible_zos_module) + remove_uss_environment(ansible_zos_module, full_path) From 336f2399f5d113d8963114346994453209cbf3ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Wed, 28 Aug 2024 14:42:15 -0600 Subject: [PATCH 04/26] Add fragment --- changelogs/fragments/1676-portability_zos_blockinfile.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 changelogs/fragments/1676-portability_zos_blockinfile.yml diff --git a/changelogs/fragments/1676-portability_zos_blockinfile.yml b/changelogs/fragments/1676-portability_zos_blockinfile.yml new file mode 100644 index 000000000..bb0ee4b9c --- /dev/null +++ b/changelogs/fragments/1676-portability_zos_blockinfile.yml @@ -0,0 +1,3 @@ +trivial: + - zos_blockinfile - Remove the use of hard-coded data set and file names. + (https://github.com/ansible-collections/ibm_zos_core/pull/1676). 
\ No newline at end of file From 9381219eca9648efd399869c59e362e8a90485a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Thu, 29 Aug 2024 10:52:07 -0600 Subject: [PATCH 05/26] Remove remaining --- .../modules/test_zos_blockinfile_func.py | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 66ad95a79..712d37c01 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -14,7 +14,6 @@ from __future__ import absolute_import, division, print_function import time import re -import inspect from shellescape import quote import pytest from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name @@ -1035,7 +1034,7 @@ def test_ds_block_insertafter_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1061,7 +1060,7 @@ def test_ds_block_insertbefore_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1087,7 +1086,7 @@ def test_ds_block_insertafter_eof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1113,7 +1112,7 @@ def test_ds_block_insertbefore_bof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1139,7 +1138,7 @@ def test_ds_block_replace_insertafter_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1165,7 +1164,7 @@ def test_ds_block_replace_insertbefore_regex(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1191,7 +1190,7 @@ def test_ds_block_replace_insertafter_eof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1217,7 +1216,7 @@ def test_ds_block_replace_insertbefore_bof(ansible_zos_module, dstype): "state":"present" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) 
content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1242,7 +1241,7 @@ def test_ds_block_absent(ansible_zos_module, dstype): "state":"absent" } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT_DEFAULTMARKER try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1305,7 +1304,7 @@ def test_ds_block_insert_with_indentation_level_specified(ansible_zos_module, ds "indentation":16 } ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1338,7 +1337,7 @@ def test_ds_block_insertafter_eof_with_backup(ansible_zos_module, dstype, backup else: params["backup_name"] = get_tmp_ds_name() + "(MEM)" ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) @@ -1602,7 +1601,7 @@ def test_ds_block_insertafter_nomatch_eof_insert(ansible_zos_module): } params["insertafter"] = 'SOME_NON_EXISTING_PATTERN' ds_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT try: ds_full_name = set_ds_environment(ansible_zos_module, temp_file, ds_name, ds_type, content) From 5c435b70e3ded91ef86ac98f816a0436adcf0f37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Thu, 29 Aug 2024 12:14:41 -0600 Subject: [PATCH 06/26] Fix change --- tests/functional/modules/test_zos_blockinfile_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 712d37c01..92c1d4077 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1279,7 +1279,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " hosts.all.shell(cmd=cmd_str) - hosts.all.shell(cmd="rm -rf " + "/tmp/zos_lineinfile/") + hosts.all.shell(cmd="rm " + ds_full_name) results = hosts.all.shell(cmd=f"cat \"//'{ds_full_name}'\" | wc -l ") for result in results.contacted.values(): assert int(result.get("stdout")) != 0 From a10e7da109b5ab2299d216f15ac4c65818d3d4f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Thu, 29 Aug 2024 13:18:45 -0600 Subject: [PATCH 07/26] Fix blockinfile --- .../modules/test_zos_blockinfile_func.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 92c1d4077..e28f6a333 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -16,7 +16,10 @@ import re from shellescape import quote import pytest -from ibm_zos_core.tests.helpers.dataset import get_tmp_ds_name +from ibm_zos_core.tests.helpers.dataset import ( + get_tmp_ds_name, + get_random_q, +) from ibm_zos_core.tests.helpers.utils 
import get_random_file_name __metaclass__ = type @@ -1261,15 +1264,16 @@ def test_ds_tmp_hlq_option(ansible_zos_module): # This TMPHLQ only works with sequential datasets hosts = ansible_zos_module ds_type = "seq" + hlq = get_random_q() params={ "insertafter":"EOF", "block":"export ZOAU_ROOT\n", "state":"present", "backup":True, - "tmp_hlq":"TMPHLQ" + "tmp_hlq": hlq } kwargs = { - "backup_name":r"TMPHLQ\.." + "backup_name":r"{hlq}\.." } content = TEST_CONTENT try: @@ -1518,7 +1522,11 @@ def test_special_characters_ds_insert_block(ansible_zos_module): def test_uss_encoding(ansible_zos_module, encoding): hosts = ansible_zos_module insert_data = "Insert this string" - params = dict(insertafter="SIMPLE", block=insert_data, state="present") + params = { + "insertafter":"SIMPLE", + "block":insert_data, + "state":"present" + } params["encoding"] = encoding full_path = get_random_file_name(dir=TMP_DIRECTORY) content = "SIMPLE LINE TO VERIFY" From 411858db5368e051a3e805c86d8523bc94ee190f Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 2 Sep 2024 18:02:04 -0600 Subject: [PATCH 08/26] Add use of tmphlq to dataset utils class --- plugins/module_utils/data_set.py | 8 ++++++-- plugins/module_utils/mvs_cmd.py | 12 +++++++++--- plugins/modules/zos_lineinfile.py | 2 +- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 7b81fe2d1..58b72a54f 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -1702,19 +1702,21 @@ def _build_volume_string_iehprogm(volumes): class DataSetUtils(object): - def __init__(self, data_set): + def __init__(self, data_set, tmphlq=None): """A standard utility to gather information about a particular data set. Note that the input data set is assumed to be cataloged. Arguments: data_set {str} -- Name of the input data set + tmphlq {str} -- High Level Qualifier for temporary datasets. """ self.module = AnsibleModuleHelper(argument_spec={}) self.data_set = data_set.upper() self.path = data_set self.is_uss_path = "/" in data_set self.ds_info = dict() + self.tmphlq = tmphlq if not self.is_uss_path: self.ds_info.update(self._gather_data_set_info()) @@ -1845,7 +1847,9 @@ def _gather_data_set_info(self): result = dict() self.data_set = self.data_set.upper().replace("\\", '') listds_rc, listds_out, listds_err = mvs_cmd.ikjeft01( - " LISTDS '{0}'".format(self.data_set), authorized=True + " LISTDS '{0}'".format(self.data_set), + authorized=True, + tmphlq=self.tmphlq ) if listds_rc == 0: diff --git a/plugins/module_utils/mvs_cmd.py b/plugins/module_utils/mvs_cmd.py index 560184477..cd27d83ec 100644 --- a/plugins/module_utils/mvs_cmd.py +++ b/plugins/module_utils/mvs_cmd.py @@ -174,7 +174,7 @@ def idcams(cmd, dds=None, authorized=False): return _run_mvs_command("IDCAMS", cmd.upper(), dds, authorized) -def ikjeft01(cmd, dds=None, authorized=False): +def ikjeft01(cmd, dds=None, authorized=False, tmphlq=None): """IKJEFT01 is the TSO/E program. You can use it whenever you wish to perform a TSO function within a batch job. It allows you to perform any TSO function. For a general list of all TSO functions, type TSO HELP. Additionally, @@ -191,13 +191,15 @@ def ikjeft01(cmd, dds=None, authorized=False): authorized : bool Whether the command should be run in authorized mode. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- tuple(int, str, str) A tuple of return code, stdout and stderr. 
""" - return _run_mvs_command("IKJEFT01", cmd, dds, authorized) + return _run_mvs_command("IKJEFT01", cmd, dds, authorized, tmphlq=tmphlq) def iehlist(cmd, dds=None, authorized=False): @@ -262,7 +264,7 @@ def adrdssu(cmd, dds=None, authorized=False): return _run_mvs_command("ADRDSSU", cmd, dds, authorized) -def _run_mvs_command(pgm, cmd, dd=None, authorized=False): +def _run_mvs_command(pgm, cmd, dd=None, authorized=False, tmphlq=None): """Run a particular MVS command. Parameters @@ -279,6 +281,8 @@ def _run_mvs_command(pgm, cmd, dd=None, authorized=False): authorized : bool Indicates whether the MVS program should run as authorized. (Default {False}) + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -296,6 +300,8 @@ def _run_mvs_command(pgm, cmd, dd=None, authorized=False): mvscmd = "mvscmd" if authorized: mvscmd += "auth" + if tmphlq: + mvscmd += " -Q={0}".format(tmphlq) mvscmd += " --pgm={0} --{1}=* --{2}=stdin".format(pgm, sysprint, sysin) if dd: for k, v in dd.items(): diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index c5f262fe0..83ccefc04 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -674,7 +674,7 @@ def main(): if data_set.DataSet.is_gds_relative_name(src) and is_gds is False: module.fail_json(msg="{0} does not exist".format(src)) - ds_utils = data_set.DataSetUtils(src) + ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) # Check if dest/src exists if not ds_utils.exists(): From 786a24280eaf6258c29628e52f8521e68b1b1985 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 3 Sep 2024 10:54:05 -0600 Subject: [PATCH 09/26] Update tmphlq in modules --- plugins/modules/zos_blockinfile.py | 2 +- plugins/modules/zos_mount.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index ab6d2a0dd..4d55e5475 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -772,7 +772,7 @@ def main(): if data_set.DataSet.is_gds_relative_name(src): module.fail_json(msg="{0} does not exist".format(src)) - ds_utils = data_set.DataSetUtils(src) + ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) if not ds_utils.exists(): message = "{0} does NOT exist".format(str(src)) module.fail_json(msg=message) diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index 85f4638aa..0b1377d31 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -587,7 +587,7 @@ def mt_backupOper(module, src, backup, tmphlq=None): Data set type is NOT supported. 
""" # analysis the file type - ds_utils = data_set.DataSetUtils(src) + ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) file_type = ds_utils.ds_type() if file_type != "USS" and file_type not in mt_DS_TYPE: message = "{0} data set type is NOT supported".format(str(file_type)) @@ -818,7 +818,7 @@ def run_module(module, arg_def): ) # data set to be mounted/unmounted must exist - fs_du = data_set.DataSetUtils(src) + fs_du = data_set.DataSetUtils(src, tmphlq=tmphlq) fs_exists = fs_du.exists() if fs_exists is False: module.fail_json( @@ -1033,7 +1033,7 @@ def run_module(module, arg_def): stderr = "Mount called on data set that is already mounted.\n" if write_persistent and module.check_mode is False: - fst_du = data_set.DataSetUtils(data_store) + fst_du = data_set.DataSetUtils(data_store, tmphlq=tmphlq) fst_exists = fst_du.exists() if fst_exists is False: module.fail_json( From bcd7772ab8b6a3cc9c8dbacbf12a2b5dea027fd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Tue, 3 Sep 2024 11:05:16 -0600 Subject: [PATCH 10/26] Remove all tmp occurrences --- .../modules/test_zos_blockinfile_func.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index e28f6a333..d3e1965c6 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -43,7 +43,7 @@ call_c_jcl="""//PDSELOCK JOB MSGCLASS=A,MSGLEVEL=(1,1),NOTIFY=&SYSUID,REGION=0M //LOCKMEM EXEC PGM=BPXBATCH //STDPARM DD * -SH /tmp/disp_shr/pdse-lock '{0}({1})' +SH {2}pdse-lock '{0}({1})' //STDIN DD DUMMY //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* @@ -1278,7 +1278,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): content = TEST_CONTENT try: ds_full_name = get_tmp_ds_name() - temp_file = "/tmp/" + ds_full_name + temp_file = get_random_file_name(dir=TMP_DIRECTORY) hosts.all.zos_data_set(name=ds_full_name, type=ds_type, replace=True) hosts.all.shell(cmd=f"echo \"{content}\" > {temp_file}") cmd_str = f"cp {quote(temp_file)} \"//'{ds_full_name}'\" " @@ -1294,6 +1294,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): assert re.match(kwargs.get(key), result.get(key)) finally: hosts.all.zos_data_set(name=ds_full_name, state="absent") + hosts.all.file(name=temp_file, state="absent") @pytest.mark.ds @@ -1375,7 +1376,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): "force":True } member_1, member_2 = "MEM1", "MEM2" - temp_file = f"/tmp/{member_2}" + temp_file = get_random_file_name(dir=TMP_DIRECTORY) content = TEST_CONTENT if ds_type == "seq": params["path"] = f"{default_data_set_name}.{member_2}" @@ -1410,14 +1411,15 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.file(path="/tmp/disp_shr/", state="directory") - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + path = get_random_file_name(suffix="/", dir=TMP_DIRECTORY) + hosts.all.file(path=path, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {path}pdse-lock.c") hosts.all.shell( - cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ - " > /tmp/disp_shr/call_c_pgm.jcl" + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1, path)}\""+ + " > 
{0}call_c_pgm.jcl".format(path) ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=path) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=path) time.sleep(5) # call lineinfile to see results results = hosts.all.zos_blockinfile(**params) @@ -1431,7 +1433,7 @@ def test_ds_block_insertafter_regex_force(ansible_zos_module, dstype): ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd='rm -r {0}'.format(path)) hosts.all.zos_data_set(name=default_data_set_name, state="absent") @@ -1679,7 +1681,7 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): "force":False } member_1, member_2 = "MEM1", "MEM2" - temp_file = f"/tmp/{member_2}" + temp_file = get_random_file_name(dir=TMP_DIRECTORY) params["path"] = f"{default_data_set_name}({member_2})" content = TEST_CONTENT try: @@ -1713,14 +1715,15 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): for result in results.contacted.values(): assert int(result.get("stdout")) != 0 # copy/compile c program and copy jcl to hold data set lock for n seconds in background(&) - hosts.all.file(path="/tmp/disp_shr/", state="directory") - hosts.all.shell(cmd=f"echo \"{c_pgm}\" > /tmp/disp_shr/pdse-lock.c") + path = get_random_file_name(suffix="/", dir=TMP_DIRECTORY) + hosts.all.file(path=path, state="directory") + hosts.all.shell(cmd=f"echo \"{c_pgm}\" > {path}pdse-lock.c") hosts.all.shell( - cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1)}\""+ - " > /tmp/disp_shr/call_c_pgm.jcl" + cmd=f"echo \"{call_c_jcl.format(default_data_set_name, member_1, path)}\""+ + " > {0}call_c_pgm.jcl".format(path) ) - hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir="/tmp/disp_shr/") - hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir="/tmp/disp_shr/") + hosts.all.shell(cmd="xlc -o pdse-lock pdse-lock.c", chdir=path) + hosts.all.shell(cmd="submit call_c_pgm.jcl", chdir=path) time.sleep(5) # call lineinfile to see results results = hosts.all.zos_blockinfile(**params) @@ -1731,5 +1734,5 @@ def test_ds_block_insertafter_regex_fail(ansible_zos_module, dstype): ps_list_res = hosts.all.shell(cmd="ps -e | grep -i 'pdse-lock'") pid = list(ps_list_res.contacted.values())[0].get('stdout').strip().split(' ')[0] hosts.all.shell(cmd=f"kill 9 {pid.strip()}") - hosts.all.shell(cmd='rm -r /tmp/disp_shr') + hosts.all.shell(cmd='rm -r {0}'.format(path)) hosts.all.zos_data_set(name=default_data_set_name, state="absent") From 40d3b903b6c6210589818ae47f58fc6d12197563 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Tue, 3 Sep 2024 11:39:09 -0600 Subject: [PATCH 11/26] Fix tmp --- tests/functional/modules/test_zos_blockinfile_func.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index d3e1965c6..5c57c4a81 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1273,7 +1273,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): "tmp_hlq": hlq } kwargs = { - "backup_name":r"{hlq}\.." 
+ "backup_name":"{0}".format(hlq) } content = TEST_CONTENT try: @@ -1291,7 +1291,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): for key in kwargs: - assert re.match(kwargs.get(key), result.get(key)) + assert re.match(kwargs.get(key) in result.get(key)) finally: hosts.all.zos_data_set(name=ds_full_name, state="absent") hosts.all.file(name=temp_file, state="absent") From f74e2f2df39959b9daea1c48b4f0707e9fdb7cd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Marcel=20Guti=C3=A9rrez=20Ben=C3=ADtez?= Date: Tue, 3 Sep 2024 11:52:06 -0600 Subject: [PATCH 12/26] Fix tmp --- tests/functional/modules/test_zos_blockinfile_func.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/modules/test_zos_blockinfile_func.py b/tests/functional/modules/test_zos_blockinfile_func.py index 5c57c4a81..fd03d17f6 100644 --- a/tests/functional/modules/test_zos_blockinfile_func.py +++ b/tests/functional/modules/test_zos_blockinfile_func.py @@ -1291,7 +1291,7 @@ def test_ds_tmp_hlq_option(ansible_zos_module): results = hosts.all.zos_blockinfile(**params) for result in results.contacted.values(): for key in kwargs: - assert re.match(kwargs.get(key) in result.get(key)) + assert kwargs.get(key) in result.get(key) finally: hosts.all.zos_data_set(name=ds_full_name, state="absent") hosts.all.file(name=temp_file, state="absent") From 790b10da5fee76a1845bc21d971a387c14efc46f Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 3 Sep 2024 14:34:17 -0600 Subject: [PATCH 13/26] Update tmphlq use in zos_encode --- plugins/modules/zos_encode.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index a17fcb7ed..b26cbe296 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -343,13 +343,15 @@ def check_pds_member(ds, mem): return check_rc -def check_mvs_dataset(ds): +def check_mvs_dataset(ds, tmphlq=None): """To call data_set utils to check if the MVS data set exists or not. Parameters ---------- ds : str Data set name. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -372,19 +374,21 @@ def check_mvs_dataset(ds): ) else: check_rc = True - ds_type = data_set.DataSetUtils(ds).ds_type() + ds_type = data_set.DataSetUtils(ds, tmphlq=tmphlq).ds_type() if not ds_type: raise EncodeError("Unable to determine data set type of {0}".format(ds)) return check_rc, ds_type -def check_file(file): +def check_file(file, tmphlq=None): """Check file is a USS file or an MVS data set. Parameters ---------- file : str File to check. + tmphlq : str + High Level Qualifier for temporary datasets. 
Returns ------- @@ -406,7 +410,7 @@ def check_file(file): if "(" in ds: dsn = ds[: ds.rfind("(", 1)] mem = "".join(re.findall(r"[(](.*?)[)]", ds)) - rc, ds_type = check_mvs_dataset(dsn) + rc, ds_type = check_mvs_dataset(dsn, tmphlq=tmphlq) if rc: if ds_type == "PO": is_mvs = check_pds_member(dsn, mem) @@ -416,7 +420,7 @@ def check_file(file): "Data set {0} is not a partitioned data set".format(dsn) ) else: - is_mvs, ds_type = check_mvs_dataset(ds) + is_mvs, ds_type = check_mvs_dataset(ds, tmphlq=tmphlq) return is_uss, is_mvs, ds_type From 3c48ce0417dca1fa0b3f1f59c434e2aafd721f57 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 4 Sep 2024 10:19:19 -0600 Subject: [PATCH 14/26] Add tmphlq to data_set_exists --- plugins/module_utils/data_set.py | 38 +++++++++++++++++++++++--------- plugins/module_utils/vtoc.py | 20 ++++++++++++----- 2 files changed, 43 insertions(+), 15 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 58b72a54f..80da6e231 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -411,11 +411,12 @@ def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False ) @staticmethod - def data_set_cataloged(name, volumes=None): + def data_set_cataloged(name, volumes=None, tmphlq=None): """Determine if a data set is in catalog. Arguments: name (str) -- The data set name to check if cataloged. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If data is is cataloged. @@ -438,8 +439,15 @@ def data_set_cataloged(name, volumes=None): module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) # The above 'listcat entries' command to idcams returns: @@ -450,7 +458,7 @@ def data_set_cataloged(name, volumes=None): raise MVSCmdExecError(rc, stdout, stderr) if volumes: - cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name) or [] + cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq) or [] if bool(set(volumes) & set(cataloged_volume_list)): return True else: @@ -460,10 +468,11 @@ def data_set_cataloged(name, volumes=None): return False @staticmethod - def data_set_cataloged_volume_list(name): + def data_set_cataloged_volume_list(name, tmphlq=None): """Get the volume list for a cataloged dataset name. Arguments: name (str) -- The data set name to check if cataloged. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: list{str} -- A list of volumes where the dataset is cataloged. 
Raise: @@ -472,8 +481,15 @@ def data_set_cataloged_volume_list(name): name = name.upper() module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}') ALL".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) # The above 'listcat entries all' command to idcams returns: # rc=0 if data set found in catalog @@ -494,7 +510,7 @@ def data_set_cataloged_volume_list(name): return volume_list @staticmethod - def data_set_exists(name, volume=None): + def data_set_exists(name, volume=None, tmphlq=None): """Determine if a data set exists. This will check the catalog in addition to the volume table of contents. @@ -502,14 +518,15 @@ def data_set_exists(name, volume=None): Arguments: name (str) -- The data set name to check if exists. volume (str) -- The volume the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If data is found. """ - if DataSet.data_set_cataloged(name): + if DataSet.data_set_cataloged(name, tmphlq=tmphlq): return True elif volume is not None: - return DataSet._is_in_vtoc(name, volume) + return DataSet._is_in_vtoc(name, volume, tmphlq=tmphlq) return False @staticmethod @@ -902,17 +919,18 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): return changed, present @staticmethod - def _is_in_vtoc(name, volume): + def _is_in_vtoc(name, volume, tmphlq=None): """Determines if data set is in a volume's table of contents. Arguments: name (str) -- The name of the data set to search for. volume (str) -- The volume to search the table of contents of. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If data set was found in table of contents for volume. """ - data_sets = vtoc.get_volume_entry(volume) + data_sets = vtoc.get_volume_entry(volume, tmphlq=tmphlq) data_set = vtoc.find_data_set_in_volume_output(name, data_sets) if data_set is not None: return True diff --git a/plugins/module_utils/vtoc.py b/plugins/module_utils/vtoc.py index 3cae4fd92..d526b5207 100644 --- a/plugins/module_utils/vtoc.py +++ b/plugins/module_utils/vtoc.py @@ -20,7 +20,7 @@ ) -def get_volume_entry(volume): +def get_volume_entry(volume, tmphlq=None): """Retrieve VTOC information for all data sets with entries on the volume. @@ -28,6 +28,8 @@ def get_volume_entry(volume): ---------- volume : str The name of the volume. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -43,7 +45,7 @@ def get_volume_entry(volume): stdin = " LISTVTOC FORMAT,VOL=3390={0}".format(volume.upper()) # dd = "SYS1.VVDS.V{0}".format(volume.upper()) dd = "{0},vol".format(volume.upper()) - stdout = _iehlist(dd, stdin) + stdout = _iehlist(dd, stdin, tmphlq=tmphlq) if stdout is None: return None data_sets = _process_output(stdout) @@ -102,7 +104,7 @@ def find_data_set_in_volume_output(data_set_name, data_sets): return None -def _iehlist(dd, stdin): +def _iehlist(dd, stdin, tmphlq=None): """Calls IEHLIST program. Parameters @@ -111,6 +113,8 @@ def _iehlist(dd, stdin): Volume information to pass as DD statement. stdin : str Input to stdin. + tmphlq : str + High Level Qualifier for temporary datasets. 
Returns ------- @@ -119,9 +123,15 @@ def _iehlist(dd, stdin): """ module = AnsibleModuleHelper(argument_spec={}) response = None + + cmd = "mvscmd --pgm=iehlist --sysprint=* --dd={0} --sysin=stdin ".format(dd) + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmd --pgm=iehlist --sysprint=* --dd={0} --sysin=stdin ".format(dd), - data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) if rc == 0: response = stdout From 0aecda8a609932c7c18c44f6f3e9835fa18261c1 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 9 Sep 2024 09:47:38 -0600 Subject: [PATCH 15/26] Update module utils --- plugins/module_utils/backup.py | 29 +++- plugins/module_utils/copy.py | 26 ++- plugins/module_utils/data_set.py | 273 ++++++++++++++++++++----------- plugins/module_utils/encode.py | 20 ++- plugins/module_utils/vtoc.py | 6 +- 5 files changed, 233 insertions(+), 121 deletions(-) diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py index 716e0d3b2..eb2a22004 100644 --- a/plugins/module_utils/backup.py +++ b/plugins/module_utils/backup.py @@ -76,6 +76,8 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc. bk_dsn : str The name of the backup data set. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -128,14 +130,14 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None): if DataSet.is_gds_positive_relative_name(bk_dsn): cp_rc = datasets.copy(dsn, bk_dsn) else: - cp_rc = _copy_ds(dsn, bk_dsn) + cp_rc = _copy_ds(dsn, bk_dsn, tmphlq=tmphlq) if cp_rc == 12: # The data set is probably a PDS or PDSE # Delete allocated backup that was created when attempting to use _copy_ds() # Safe to delete because _copy_ds() would have raised an exception if it did # not successfully create the backup data set, so no risk of it predating module invocation datasets.delete(bk_dsn) - _allocate_model(bk_dsn, dsn) + _allocate_model(bk_dsn, dsn, tmphlq=tmphlq) rc, out, err = _copy_pds(dsn, bk_dsn) if rc != 0: raise BackupError( @@ -222,7 +224,7 @@ def uss_file_backup(path, backup_name=None, compress=False): return backup_name -def _copy_ds(ds, bk_ds): +def _copy_ds(ds, bk_ds, tmphlq=None): """Copy the contents of a data set to another. Parameters @@ -231,6 +233,8 @@ def _copy_ds(ds, bk_ds): The source data set to be copied from. Should be SEQ or VSAM. bk_dsn : str The destination data set to copy to. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -243,14 +247,19 @@ def _copy_ds(ds, bk_ds): When copying data fails. """ module = AnsibleModuleHelper(argument_spec={}) - _allocate_model(bk_ds, ds) + _allocate_model(bk_ds, ds, tmphlq=tmphlq) repro_cmd = """ REPRO - INDATASET('{0}') - OUTDATASET('{1}')""".format( ds, bk_ds ) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=repro_cmd, errors='replace' + cmd, data=repro_cmd, errors='replace' ) if rc != 0 and rc != 12: datasets.delete(bk_ds) @@ -259,12 +268,12 @@ def _copy_ds(ds, bk_ds): ds, out, err ) ) - if rc != 0 and DataSet.is_empty(ds): + if rc != 0 and DataSet.is_empty(ds, tmphlq=tmphlq): rc = 0 return rc -def _allocate_model(ds, model): +def _allocate_model(ds, model, tmphlq=None): """Allocate a data set using allocation information of a model data set. 
Parameters @@ -273,6 +282,8 @@ def _allocate_model(ds, model): The name of the data set to be allocated. model : str The name of the data set whose allocation parameters should be used. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -290,7 +301,11 @@ def _allocate_model(ds, model): LIKE('{1}')""".format( ds, model ) + cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=* --systsin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command(cmd, data=alloc_cmd, errors='replace') if rc != 0: raise BackupError( diff --git a/plugins/module_utils/copy.py b/plugins/module_utils/copy.py index 499aecbd9..f8b37b514 100644 --- a/plugins/module_utils/copy.py +++ b/plugins/module_utils/copy.py @@ -339,7 +339,7 @@ def copy_mvs2mvs(src, dest, is_binary=False): return rc, out, err -def copy_vsam_ps(src, dest): +def copy_vsam_ps(src, dest, tmphlq=None): """Copy a VSAM(KSDS) data set to a PS data set vise versa. Parameters @@ -348,6 +348,8 @@ def copy_vsam_ps(src, dest): The VSAM(KSDS) or PS data set to be copied. dest : str The PS or VSAM(KSDS) data set. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -367,14 +369,18 @@ def copy_vsam_ps(src, dest): src = _validate_data_set_name(src) dest = _validate_data_set_name(dest) repro_cmd = REPRO.format(src, dest) + cmd = "mvscmdauth --pgm=idcams --sysprint=stdout --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = module.run_command(cmd, data=repro_cmd, errors='replace') if rc: raise USSCmdExecError(cmd, rc, out, err) return rc, out, err -def copy_asa_uss2mvs(src, dest): +def copy_asa_uss2mvs(src, dest, tmphlq=None): """Copy a file from USS to an ASA sequential data set or PDS/E member. Parameters @@ -383,6 +389,8 @@ def copy_asa_uss2mvs(src, dest): Path of the USS file. dest : str The MVS destination data set or member. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -394,12 +402,12 @@ def copy_asa_uss2mvs(src, dest): The stderr after the copy command executed successfully. """ oget_cmd = "OGET '{0}' '{1}'".format(src, dest) - rc, out, err = ikjeft01(oget_cmd, authorized=True) + rc, out, err = ikjeft01(oget_cmd, authorized=True, tmphlq=tmphlq) return TSOCmdResponse(rc, out, err) -def copy_asa_mvs2uss(src, dest): +def copy_asa_mvs2uss(src, dest, tmphlq=None): """Copy an ASA sequential data set or member to USS. Parameters @@ -408,6 +416,8 @@ def copy_asa_mvs2uss(src, dest): The MVS data set to be copied. dest : str Destination path in USS. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -422,12 +432,12 @@ def copy_asa_mvs2uss(src, dest): dest = _validate_path(dest) oput_cmd = "OPUT '{0}' '{1}'".format(src, dest) - rc, out, err = ikjeft01(oput_cmd, authorized=True) + rc, out, err = ikjeft01(oput_cmd, authorized=True, tmphlq=tmphlq) return TSOCmdResponse(rc, out, err) -def copy_asa_pds2uss(src, dest): +def copy_asa_pds2uss(src, dest, tmphlq=None): """Copy all members from an ASA PDS/E to USS. Parameters @@ -436,6 +446,8 @@ def copy_asa_pds2uss(src, dest): The MVS data set to be copied. dest : str Destination path in USS (must be a directory). + tmphlq : str + High Level Qualifier for temporary datasets. 
Returns
 -------
@@ -465,7 +477,7 @@ def copy_asa_pds2uss(src, dest):
             dest_path = path.join(dest, member)
             oput_cmd = "OPUT '{0}' '{1}'".format(src_member, dest_path)
 
-            rc, out, err = ikjeft01(oput_cmd, authorized=True)
+            rc, out, err = ikjeft01(oput_cmd, authorized=True, tmphlq=tmphlq)
             if rc != 0:
                 return TSOCmdResponse(rc, out, err)
 
diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index 80da6e231..35e2bffe6 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -174,7 +174,7 @@ def ensure_present(
         arguments.pop("replace", None)
         present = False
         changed = False
-        if DataSet.data_set_cataloged(name):
+        if DataSet.data_set_cataloged(name, tmphlq=tmp_hlq):
             present = True
 
         if not present:
@@ -185,7 +185,7 @@ def ensure_present(
                 # data set exists on volume
                 if "Error Code: 0x4704" in e.msg:
                     present, changed = DataSet.attempt_catalog_if_necessary(
-                        name, volumes
+                        name, volumes, tmphlq=tmp_hlq
                     )
                     if present and changed:
                         raise_error = False
@@ -200,26 +200,28 @@ def ensure_present(
         return True
 
     @staticmethod
-    def ensure_absent(name, volumes=None):
+    def ensure_absent(name, volumes=None, tmphlq=None):
         """Deletes provided data set if it exists.
 
         Arguments:
             name (str) -- The name of the data set to ensure is absent.
             volumes (list[str]) -- The volumes the data set may reside on.
+            tmphlq (str) -- High Level Qualifier for temporary datasets.
 
         Returns:
             changed (bool) -- Indicates if changes were made.
         """
-        changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes)
+        changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq)
         return changed
 
     # ? should we do additional check to ensure member was actually created?
     @staticmethod
-    def ensure_member_present(name, replace=False):
+    def ensure_member_present(name, replace=False, tmphlq=None):
         """Creates data set member if it does not already exist.
 
         Arguments:
             name (str) -- The name of the data set to ensure is present.
             replace (bool) -- Used to determine behavior when data set already
                 exists.
+            tmphlq (str) -- High Level Qualifier for temporary datasets.
 
         Returns:
             bool -- Indicates if changes were made.
         """
             if not replace:
                 return False
             DataSet.delete_member(name)
-        DataSet.create_member(name)
+        DataSet.create_member(name, tmphlq=tmphlq)
         return True
 
     @staticmethod
@@ -242,21 +244,22 @@ def ensure_member_absent(name, force=False):
         return False
 
     @staticmethod
-    def ensure_cataloged(name, volumes):
+    def ensure_cataloged(name, volumes, tmphlq=None):
         """Ensure a data set is cataloged. Data set can initially
         be in cataloged or uncataloged state when this function is called.
 
         Arguments:
             name (str) -- The data set name to ensure is cataloged.
             volume (str) -- The volume on which the data set should exist.
+            tmphlq (str) -- High Level Qualifier for temporary datasets.
 
         Returns:
             bool -- If changes were made.
         """
-        if DataSet.data_set_cataloged(name, None):
+        if DataSet.data_set_cataloged(name, None, tmphlq=tmphlq):
             return False
         try:
-            DataSet.catalog(name, volumes)
+            DataSet.catalog(name, volumes, tmphlq=tmphlq)
         except DatasetCatalogError:
             raise DatasetCatalogError(
                 name, volumes, "-1", "Data set was not found. Unable to catalog."
            )
        return True

    @staticmethod
-    def ensure_uncataloged(name):
+    def ensure_uncataloged(name, tmphlq=None):
        """Ensure a data set is uncataloged. Data set can initially
        be in cataloged or uncataloged state when this function is called.
Arguments: name (str) -- The data set name to ensure is uncataloged. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If changes were made. """ - if DataSet.data_set_cataloged(name): - DataSet.uncatalog(name) + if DataSet.data_set_cataloged(name, tmphlq=tmphlq): + DataSet.uncatalog(name, tmphlq=tmphlq) return True return False @staticmethod - def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): + def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): """Allocates a data set based on the attributes of a 'model' data set. Useful when a data set needs to be created identical to another. Supported model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), @@ -297,17 +301,18 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo asa_text {bool} -- Whether the new data set should support ASA control characters (have record format FBA) vol {str} -- The volume where data set should be allocated + tmphlq {str} -- High Level Qualifier for temporary datasets. Raise: NonExistentSourceError: When the model data set does not exist. MVSCmdExecError: When the call to IKJEFT01 to allocate the data set fails. """ - if not DataSet.data_set_exists(model): + if not DataSet.data_set_exists(model, tmphlq=tmphlq): raise DatasetNotFoundError(model) ds_name = extract_dsname(ds_name) - model_type = DataSet.data_set_type(model) + model_type = DataSet.data_set_type(model, tmphlq=tmphlq) # The break lines are absolutely necessary, a JCL code line can't # be longer than 72 characters. The following JCL is compatible with @@ -339,12 +344,12 @@ def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vo RECFM(U) - DSNTYPE(LIBRARY)""".format(alloc_cmd) - rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True) + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True, tmphlq=tmphlq) if rc != 0: raise MVSCmdExecError(rc, out, err) @staticmethod - def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None): + def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): """ Allocates a new current generation of a generation data group using a model data set to set its attributes. @@ -364,6 +369,8 @@ def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False characters (have record format FBA). vol : str, optional The volume where the new data set should be allocated. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -403,7 +410,7 @@ def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False space_type='' ) - success = data_set_object.ensure_present() + success = data_set_object.ensure_present(tmp_hlq=tmphlq) if not success: raise DatasetCreateError( data_set=ds_name, @@ -611,11 +618,12 @@ def files_in_data_set_members(src, dest): return False @staticmethod - def data_set_volume(name): + def data_set_volume(name, tmphlq=None): """Checks the volume where a data set is located. Arguments: name (str) -- The name of the data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: str -- Name of the volume where the data set is. @@ -631,7 +639,7 @@ def data_set_volume(name): return data_set_information[0].volume # If listing failed to return a data set, then it's probably a VSAM. 
- output = DataSet._get_listcat_data(name) + output = DataSet._get_listcat_data(name, tmphlq=tmphlq) if re.findall(r"NOT FOUND|NOT LISTED", output): raise DatasetNotFoundError(name) @@ -644,12 +652,13 @@ def data_set_volume(name): raise DatasetVolumeError(name) @staticmethod - def data_set_type(name, volume=None): + def data_set_type(name, volume=None, tmphlq=None): """Checks the type of a data set, data sets must be cataloged. Arguments: name (str) -- The name of the data set. volume (str) -- The volume the data set may reside on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: str -- The type of the data set (one of "PS", "PO", "DA", "GDG", @@ -657,7 +666,7 @@ def data_set_type(name, volume=None): None -- If the data set does not exist or ZOAU is not able to determine the type. """ - if not DataSet.data_set_exists(name, volume): + if not DataSet.data_set_exists(name, volume, tmphlq=tmphlq): return None data_sets_found = datasets.list_datasets(name) @@ -674,7 +683,7 @@ def data_set_type(name, volume=None): # Next, trying to get the DATA information of a VSAM through # LISTCAT. - output = DataSet._get_listcat_data(name) + output = DataSet._get_listcat_data(name, tmphlq=tmphlq) # Filtering all the DATA information to only get the ATTRIBUTES block. data_set_attributes = re.findall( @@ -694,11 +703,12 @@ def data_set_type(name, volume=None): return None @staticmethod - def _get_listcat_data(name): + def _get_listcat_data(name, tmphlq=None): """Runs IDCAMS to get the DATA information associated with a data set. Arguments: name (str) -- Name of the data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: str -- Standard output from IDCAMS. @@ -706,8 +716,13 @@ def _get_listcat_data(name): name = name.upper() module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENT('{0}') DATA ALL".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' + cmd, data=stdin, errors='replace' ) if rc != 0: @@ -716,20 +731,21 @@ def _get_listcat_data(name): return stdout @staticmethod - def is_empty(name, volume=None): + def is_empty(name, volume=None, tmphlq=None): """Determines whether a data set is empty. Arguments: name (str) -- The name of the data set. volume (str) -- The volume where the data set resides. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- Whether the data set is empty or not. """ - if not DataSet.data_set_exists(name, volume): + if not DataSet.data_set_exists(name, volume, tmphlq=tmphlq): raise DatasetNotFoundError(name) - ds_type = DataSet.data_set_type(name, volume) + ds_type = DataSet.data_set_type(name, volume, tmphlq=tmphlq) if ds_type in DataSet.MVS_PARTITIONED: return DataSet._pds_empty(name) @@ -738,7 +754,7 @@ def is_empty(name, volume=None): rc, stdout, stderr = module.run_command("head \"//'{0}'\"".format(name), errors='replace') return rc == 0 and len(stdout.strip()) == 0 elif ds_type in DataSet.MVS_VSAM: - return DataSet._vsam_empty(name) + return DataSet._vsam_empty(name, tmphlq=tmphlq) @staticmethod def _pds_empty(name): @@ -758,11 +774,12 @@ def _pds_empty(name): return rc == 2 @staticmethod - def _vsam_empty(name): + def _vsam_empty(name, tmphlq=None): """Determines if a VSAM data set is empty. Arguments: name (str) -- The name of the VSAM data set. 
+            tmphlq (str) -- High Level Qualifier for temporary datasets.
 
         Returns:
             bool - If VSAM data set is empty.
@@ -773,23 +790,30 @@ def _vsam_empty(name):
         empty_cmd = """  PRINT -
     INFILE(MYDSET) -
     COUNT(1)"""
+
+        cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format(
+            name
+        )
+        if tmphlq:
+            cmd = "{0} -Q={1}".format(cmd, tmphlq)
+
         rc, out, err = module.run_command(
-            "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format(
-                name),
-            data=empty_cmd, errors='replace'
+            cmd, data=empty_cmd, errors='replace'
         )
+
         if rc == 4 or "VSAM OPEN RETURN CODE IS 160" in out:
             return True
         elif rc != 0:
             return False
 
     @staticmethod
-    def attempt_catalog_if_necessary(name, volumes):
+    def attempt_catalog_if_necessary(name, volumes, tmphlq=None):
         """Attempts to catalog a data set if not already cataloged.
 
         Arguments:
             name (str) -- The name of the data set.
             volumes (list[str]) -- The volumes the data set may reside on.
+            tmphlq (str) -- High Level Qualifier for temporary datasets.
 
         Returns:
             bool -- Whether the data set is now present.
 
         """
         changed = False
         present = False
-        if DataSet.data_set_cataloged(name):
+        if DataSet.data_set_cataloged(name, tmphlq=tmphlq):
             present = True
         elif volumes is not None:
             errors = False
             try:
-                DataSet.catalog(name, volumes)
+                DataSet.catalog(name, volumes, tmphlq=tmphlq)
             except DatasetCatalogError:
                 errors = True
             if not errors:
@@ -811,7 +835,7 @@
         return present, changed
 
     @staticmethod
-    def attempt_catalog_if_necessary_and_delete(name, volumes):
+    def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None):
         """Attempts to catalog a data set if not already cataloged, then deletes
         the data set.
 
         This is helpful when a data set currently cataloged is not the data
@@ -822,6 +846,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes):
         Arguments:
             name (str) -- The name of the data set.
             volumes (list[str]) -- The volumes the data set may reside on.
+            tmphlq (str) -- High Level Qualifier for temporary datasets.
 
         Returns:
             changed (bool) -- Whether changes were made.
@@ -833,12 +858,12 @@
 
         if volumes:
             # Check if the data set is cataloged
-            present = DataSet.data_set_cataloged(name)
+            present = DataSet.data_set_cataloged(name, tmphlq=tmphlq)
 
             if present:
                 # Data set is cataloged, now check if it's cataloged on the provided volumes
                 # If it is, we just delete because the DS is the right one wanting deletion.
-                present = DataSet.data_set_cataloged(name, volumes)
+                present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq)
 
                 if present:
                     DataSet.delete(name)
@@ -853,41 +878,41 @@
             # We need to identify the volumes where the current cataloged data set
             # is located for use later when we recatalog. Code is strategically
             # placed before the uncatalog.
- cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name) + cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq) try: - DataSet.uncatalog(name) + DataSet.uncatalog(name, tmphlq=tmphlq) except DatasetUncatalogError: return changed, present # Catalog the data set for the provided volumes try: - DataSet.catalog(name, volumes) + DataSet.catalog(name, volumes, tmphlq=tmphlq) except DatasetCatalogError: try: # A failure, so recatalog the original data set on the original volumes - DataSet.catalog(name, cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) except DatasetCatalogError: pass return changed, present # Check the recatalog, ensure it cataloged before we try to remove - present = DataSet.data_set_cataloged(name, volumes) + present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: try: DataSet.delete(name) except DatasetDeleteError: try: - DataSet.uncatalog(name) + DataSet.uncatalog(name, tmphlq=tmphlq) except DatasetUncatalogError: try: - DataSet.catalog(name, cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) except DatasetCatalogError: pass return changed, present try: - DataSet.catalog(name, cataloged_volume_list_original) + DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) changed = True present = False except DatasetCatalogError: @@ -896,18 +921,18 @@ def attempt_catalog_if_necessary_and_delete(name, volumes): return changed, present else: try: - DataSet.catalog(name, volumes) + DataSet.catalog(name, volumes, tmphlq=tmphlq) except DatasetCatalogError: return changed, present - present = DataSet.data_set_cataloged(name, volumes) + present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: DataSet.delete(name) changed = True present = False else: - present = DataSet.data_set_cataloged(name, None) + present = DataSet.data_set_cataloged(name, None, tmphlq=tmphlq) if present: try: DataSet.delete(name) @@ -1153,7 +1178,7 @@ def create( except exceptions.DatasetVerificationError: # verification of a data set spanning multiple volumes is currently broken in ZOAU v.1.3.0 if volumes and len(volumes) > 1: - if DataSet.data_set_cataloged(name, volumes): + if DataSet.data_set_cataloged(name, volumes, tmphlq=tmp_hlq): return 0 raise DatasetCreateError( raw_name if raw_name else name, @@ -1179,12 +1204,13 @@ def delete(name): @staticmethod # TODO: verify that this method works for all lengths etc - def create_member(name): + def create_member(name, tmphlq=None): """Create a data set member if the partitioned data set exists. Also used to overwrite a data set member if empty replacement is desired. Arguments: name (str) -- The data set name, including member name, to create. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetNotFoundError: If data set cannot be found. 
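
For reference, the control flow these hunks thread tmphlq through reduces to the sketch below. This is an illustrative condensation, not code from the patch: the real method re-catalogs the original entry inside nested try/except blocks at every failure point, which the trailing comments only summarize.

    # Condensed sketch of attempt_catalog_if_necessary_and_delete() for the
    # case where `name` is cataloged, but not on the volumes the caller gave.
    original = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq)
    DataSet.uncatalog(name, tmphlq=tmphlq)              # free the catalog entry
    DataSet.catalog(name, volumes, tmphlq=tmphlq)       # catalog the requested data set
    if DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq):
        DataSet.delete(name)                            # delete the requested data set
    DataSet.catalog(name, original, tmphlq=tmphlq)      # restore the original entry
    # On any failure along the way, the shipped code falls back to
    # re-cataloging `original`, so the pre-existing entry is never lost.
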
@@ -1192,7 +1218,7 @@ def create_member(name): """ module = AnsibleModuleHelper(argument_spec={}) base_dsname = name.split("(")[0] - if not base_dsname or not DataSet.data_set_cataloged(base_dsname): + if not base_dsname or not DataSet.data_set_cataloged(base_dsname, tmphlq=tmphlq): raise DatasetNotFoundError(name) tmp_file = tempfile.NamedTemporaryFile(delete=True) rc, stdout, stderr = module.run_command( @@ -1217,26 +1243,28 @@ def delete_member(name, force=False): raise DatasetMemberDeleteError(name, rc) @staticmethod - def catalog(name, volumes): + def catalog(name, volumes, tmphlq=None): """Catalog an uncataloged data set Arguments: name (str) -- The name of the data set to catalog. volumes (list[str]) -- The volume(s) the data set resides on. + tmphlq (str) -- High Level Qualifier for temporary datasets. """ - if DataSet.is_vsam(name, volumes): - DataSet._catalog_vsam(name, volumes) + if DataSet.is_vsam(name, volumes, tmphlq=tmphlq): + DataSet._catalog_vsam(name, volumes, tmphlq=tmphlq) else: - DataSet._catalog_non_vsam(name, volumes) + DataSet._catalog_non_vsam(name, volumes, tmphlq=tmphlq) @staticmethod # TODO: extend for multi volume data sets - def _catalog_non_vsam(name, volumes): + def _catalog_non_vsam(name, volumes, tmphlq=None): """Catalog a non-VSAM data set. Arguments: name (str) -- The data set to catalog. volumes (str) -- The volume(s) the data set resides on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetCatalogError: When attempt at catalog fails. @@ -1245,21 +1273,27 @@ def _catalog_non_vsam(name, volumes): iehprogm_input = DataSet._build_non_vsam_catalog_command( name.upper(), volumes) + cmd = "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin", data=iehprogm_input, errors='replace' + cmd, data=iehprogm_input, errors='replace' ) + if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout: raise DatasetCatalogError(name, volumes, rc) return @staticmethod # TODO: extend for multi volume data sets - def _catalog_vsam(name, volumes): + def _catalog_vsam(name, volumes, tmphlq=None): """Catalog a VSAM data set. Arguments: name (str) -- The data set to catalog. volumes (str) -- The volume(s) the data set resides on. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetCatalogError: When attempt at catalog fails. @@ -1273,8 +1307,8 @@ def _catalog_vsam(name, volumes): # In order to catalog a uncataloged data set, we can't rely on LISTCAT # so using the VTOC entries we can make some assumptions of if the data set # is indexed, linear etc. 
- ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0]) - ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0]) + ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0], tmphlq=tmphlq) + ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0], tmphlq=tmphlq) if ds_vtoc_data_entry and ds_vtoc_index_entry: data_set_type_vsam = "INDEXED" @@ -1294,8 +1328,10 @@ def _catalog_vsam(name, volumes): data_set_type_vsam, ) - command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command, errors='replace') + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + command_rc, stdout, stderr = module.run_command(cmd, data=command, errors='replace') if command_rc == 0: success = True @@ -1309,8 +1345,11 @@ def _catalog_vsam(name, volumes): "LINEAR", ) - command_rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=command, errors='replace') + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + command_rc, stdout, stderr = module.run_command(cmd, data=command, errors='replace') if command_rc == 0: success = True @@ -1325,23 +1364,25 @@ def _catalog_vsam(name, volumes): return @staticmethod - def uncatalog(name): + def uncatalog(name, tmphlq=None): """Uncatalog a data set. Arguments: name (str) -- The name of the data set to uncatalog. + tmphlq (str) -- High Level Qualifier for temporary datasets. """ - if DataSet.is_vsam(name): - DataSet._uncatalog_vsam(name) + if DataSet.is_vsam(name, tmphlq=tmphlq): + DataSet._uncatalog_vsam(name, tmphlq=tmphlq) else: - DataSet._uncatalog_non_vsam(name) + DataSet._uncatalog_non_vsam(name, tmphlq=tmphlq) @staticmethod - def _uncatalog_non_vsam(name): + def _uncatalog_non_vsam(name, tmphlq=None): """Uncatalog a non-VSAM data set. Arguments: name (str) -- The name of the data set to uncatalog. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetUncatalogError: When uncataloging fails. @@ -1352,10 +1393,13 @@ def _uncatalog_non_vsam(name): try: temp_name = DataSet.create_temp(name.split(".")[0]) DataSet.write(temp_name, iehprogm_input) - rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format( - temp_name), errors='replace' - ) + + cmd = "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format(temp_name) + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command(cmd, errors='replace') + if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout: raise DatasetUncatalogError(name, rc) finally: @@ -1364,11 +1408,12 @@ def _uncatalog_non_vsam(name): return @staticmethod - def _uncatalog_vsam(name): + def _uncatalog_vsam(name, tmphlq=None): """Uncatalog a VSAM data set. Arguments: name (str) -- The name of the data set to uncatalog. + tmphlq (str) -- High Level Qualifier for temporary datasets. Raises: DatasetUncatalogError: When uncatalog fails. 
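
Because _catalog_vsam has to work before the entry exists in the catalog, LISTCAT is unavailable and the two VTOC probes above are the only signal. A condensed sketch of the inference, reusing the names from these hunks (illustration only, error handling omitted):

    # Sketch: how _catalog_vsam() guesses the VSAM organization from VTOC.
    ds_vtoc_data_entry = vtoc.get_data_set_entry(name + ".DATA", volumes[0], tmphlq=tmphlq)
    ds_vtoc_index_entry = vtoc.get_data_set_entry(name + ".INDEX", volumes[0], tmphlq=tmphlq)
    if ds_vtoc_data_entry and ds_vtoc_index_entry:
        data_set_type_vsam = "INDEXED"      # .DATA and .INDEX components present, so KSDS
    else:
        data_set_type_vsam = "NONINDEXED"   # only a .DATA component was found
    # If the first IDCAMS DEFINE fails and the guess was not INDEXED, one
    # retry is made with "LINEAR" before raising DatasetCatalogError.
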
@@ -1376,15 +1421,17 @@ def _uncatalog_vsam(name): module = AnsibleModuleHelper(argument_spec={}) idcams_input = DataSet._VSAM_UNCATALOG_COMMAND.format(name) - rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=idcams_input, errors='replace' - ) + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command(cmd, data=idcams_input, errors='replace') if rc != 0: raise DatasetUncatalogError(name, rc) @staticmethod - def is_vsam(name, volumes=None): + def is_vsam(name, volumes=None, tmphlq=None): """Determine a given data set is VSAM. If volume is not provided, then LISTCAT will be used to check data set info. If volume is provided, then VTOC will be used to check data set info. If not in VTOC @@ -1395,27 +1442,29 @@ def is_vsam(name, volumes=None): Keyword Arguments: volumes (list[str]) -- The name(s) of the volume(s). (default: (None)) + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If the data set is VSAM. """ if not volumes: - return DataSet._is_vsam_from_listcat(name) + return DataSet._is_vsam_from_listcat(name, tmphlq=tmphlq) # ? will multivolume data set have vtoc info for each volume? - return DataSet._is_vsam_from_vtoc(name, volumes[0]) + return DataSet._is_vsam_from_vtoc(name, volumes[0], tmphlq=tmphlq) @staticmethod - def _is_vsam_from_vtoc(name, volume): + def _is_vsam_from_vtoc(name, volume, tmphlq=None): """Use VTOC to determine if a given data set is VSAM. Arguments: name (str) -- The name of the data set. volume (str) -- The volume name whose table of contents will be searched. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If the data set is VSAM. """ - data_sets = vtoc.get_volume_entry(volume) + data_sets = vtoc.get_volume_entry(volume, tmphlq=tmphlq) vsam_name = name + ".DATA" data_set = vtoc.find_data_set_in_volume_output(vsam_name, data_sets) if data_set is None: @@ -1425,20 +1474,24 @@ def _is_vsam_from_vtoc(name, volume): return False @staticmethod - def _is_vsam_from_listcat(name): + def _is_vsam_from_listcat(name, tmphlq=None): """Use LISTCAT command to determine if a given data set is VSAM. Arguments: name (str) -- The name of the data set. + tmphlq (str) -- High Level Qualifier for temporary datasets. Returns: bool -- If the data set is VSAM. """ module = AnsibleModuleHelper(argument_spec={}) stdin = " LISTCAT ENTRIES('{0}')".format(name.upper()) - rc, stdout, stderr = module.run_command( - "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=stdin, errors='replace' - ) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command(cmd, data=stdin, errors='replace') if re.search(r"^0CLUSTER[ ]+-+[ ]+" + name + r"[ ]*$", stdout, re.MULTILINE): return True return False @@ -2042,7 +2095,7 @@ def create(self, tmp_hlq=None, replace=True, force=False): } formatted_args = DataSet._build_zoau_args(**arguments) changed = False - if DataSet.data_set_exists(self.name): + if DataSet.data_set_exists(self.name, tmphlq=tmp_hlq): DataSet.delete(self.name) changed = True zoau_data_set = datasets.create(**formatted_args) @@ -2118,53 +2171,73 @@ def delete(self): DataSet.ensure_absent(self.name, self.volumes) self.set_state("absent") - def ensure_cataloged(self): + def ensure_cataloged(self, tmp_hlq=None): """ Ensures the data set is cataloged, if not catalogs it. 
+        Parameters
+        ----------
+        tmp_hlq : str
+            High level qualifier for temporary datasets.
+
         Returns
         -------
         int
             Indicates if changes were made.
         """
-        rc = DataSet.ensure_cataloged(name=self.name, volumes=self.volumes)
+        rc = DataSet.ensure_cataloged(name=self.name, volumes=self.volumes, tmphlq=tmp_hlq)
         self.is_cataloged = True
         return rc
 
-    def catalog(self):
+    def catalog(self, tmp_hlq=None):
         """Catalog the data set in question.
 
+        Parameters
+        ----------
+        tmp_hlq : str
+            High level qualifier for temporary datasets.
+
         Returns
         -------
         int
             Indicates if changes were made.
         """
-        rc = DataSet.catalog(self.name, self.volumes)
+        rc = DataSet.catalog(self.name, self.volumes, tmphlq=tmp_hlq)
         self.is_cataloged = True
         return rc
 
-    def ensure_uncataloged(self):
+    def ensure_uncataloged(self, tmp_hlq=None):
         """
         Ensures the data set is uncataloged; if not, uncatalogs it.
 
+        Parameters
+        ----------
+        tmp_hlq : str
+            High level qualifier for temporary datasets.
+
         Returns
         -------
         int
             Indicates if changes were made.
         """
-        rc = DataSet.ensure_uncataloged(self.name)
+        rc = DataSet.ensure_uncataloged(self.name, tmphlq=tmp_hlq)
         self.is_cataloged = False
         return rc
 
-    def uncatalog(self):
+    def uncatalog(self, tmp_hlq=None):
         """Uncatalog the data set in question.
 
+        Parameters
+        ----------
+        tmp_hlq : str
+            High level qualifier for temporary datasets.
+
         Returns
         -------
         int
             Indicates if changes were made.
         """
-        rc = DataSet.uncatalog(self.name)
+        rc = DataSet.uncatalog(self.name, tmphlq=tmp_hlq)
         self.is_cataloged = False
         return rc
 
@@ -2223,20 +2296,22 @@ def ensure_absent(self, force):
         rc = DataSet.ensure_member_absent(self.name, force)
         return rc
 
-    def ensure_present(self, replace=None):
+    def ensure_present(self, replace=None, tmphlq=None):
         """
         Make sure that the member is created or fail creating it.
 
         Parameters
         ----------
         replace : bool
             Used to determine behavior when member already exists.
+        tmphlq : str
+            High Level Qualifier for temporary datasets.
 
         Returns
         -------
         int
             Indicates if changes were made.
         """
-        rc = DataSet.ensure_member_present(self.name, replace)
+        rc = DataSet.ensure_member_present(self.name, replace, tmphlq=tmphlq)
         return rc
 
 
diff --git a/plugins/module_utils/encode.py b/plugins/module_utils/encode.py
index 606a2a189..9bdac056a 100644
--- a/plugins/module_utils/encode.py
+++ b/plugins/module_utils/encode.py
@@ -162,7 +162,7 @@ def _validate_encoding(self, encoding):
         parsed_args = parser.parse_args({"encoding": encoding})
         return parsed_args.get("encoding")
 
-    def listdsi_data_set(self, ds):
+    def listdsi_data_set(self, ds, tmphlq=None):
         """Invoke IDCAMS LISTCAT command to get the record length and space used
         to estimate the space used by the VSAM data set.
 
         Parameters
         ----------
         ds : str
             The VSAM data set to be checked.
+        tmphlq : str
+            High Level Qualifier for temporary datasets.
Returns ------- @@ -187,7 +189,11 @@ def listdsi_data_set(self, ds): reclen = 80 space_u = 1024 listcat_cmd = " LISTCAT ENT('{0}') ALL".format(ds) + cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=stdout --systsin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + rc, out, err = self.module.run_command(cmd, data=listcat_cmd, errors='replace') if rc: raise EncodeError(err) @@ -460,7 +466,7 @@ def uss_convert_encoding_prev(self, src, dest, from_code, to_code): return convert_rc def mvs_convert_encoding( - self, src, dest, from_code, to_code, src_type=None, dest_type=None + self, src, dest, from_code, to_code, src_type=None, dest_type=None, tmphlq=None ): """Convert the encoding of the data from 1) USS to MVS(PS, PDS/E VSAM) @@ -484,6 +490,8 @@ def mvs_convert_encoding( The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS). dest_type : str The output MVS data set type. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -507,11 +515,11 @@ def mvs_convert_encoding( temp_src = mkdtemp() rc, out, err = copy.copy_pds2uss(src, temp_src) if src_type == "KSDS": - reclen, space_u = self.listdsi_data_set(src.upper()) + reclen, space_u = self.listdsi_data_set(src.upper(), tmphlq=tmphlq) # RDW takes the first 4 bytes in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 temp_ps = self.temp_data_set(reclen, space_u) - rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps) + rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps, tmphlq=tmphlq) temp_src_fo = NamedTemporaryFile() temp_src = temp_src_fo.name rc, out, err = copy.copy_ps2uss(temp_ps, temp_src) @@ -526,12 +534,12 @@ def mvs_convert_encoding( convert_rc = True else: if dest_type == "KSDS": - reclen, space_u = self.listdsi_data_set(dest.upper()) + reclen, space_u = self.listdsi_data_set(dest.upper(), tmphlq=tmphlq) # RDW takes the first 4 bytes or records in the VB format, hence we need to add an extra buffer to the vsam max recl. reclen += 4 temp_ps = self.temp_data_set(reclen, space_u) rc, out, err = copy.copy_uss2mvs(temp_dest, temp_ps, "PS") - rc, out, err = copy.copy_vsam_ps(temp_ps, dest.upper()) + rc, out, err = copy.copy_vsam_ps(temp_ps, dest.upper(), tmphlq=tmphlq) convert_rc = True elif dest_type == "PO": for (dir, subdir, files) in walk(temp_dest): diff --git a/plugins/module_utils/vtoc.py b/plugins/module_utils/vtoc.py index d526b5207..fbe457388 100644 --- a/plugins/module_utils/vtoc.py +++ b/plugins/module_utils/vtoc.py @@ -54,7 +54,7 @@ def get_volume_entry(volume, tmphlq=None): return data_sets -def get_data_set_entry(data_set_name, volume): +def get_data_set_entry(data_set_name, volume, tmphlq=None): """Retrieve VTOC information for a single data set on a volume. @@ -64,6 +64,8 @@ def get_data_set_entry(data_set_name, volume): The name of the data set to retrieve information for. volume : str The name of the volume. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -71,7 +73,7 @@ def get_data_set_entry(data_set_name, volume): The information for the data set found in VTOC. 
""" data_set = None - data_sets = get_volume_entry(volume) + data_sets = get_volume_entry(volume, tmphlq=tmphlq) for ds in data_sets: if ds.get("data_set_name") == data_set_name.upper(): data_set = ds From d62e34c83218d7830f777e0cc85a17e4b82dffad Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 11 Sep 2024 12:22:42 -0600 Subject: [PATCH 16/26] Update modules --- plugins/modules/zos_apf.py | 2 +- plugins/modules/zos_copy.py | 81 ++++++++++++++++++++++----------- plugins/modules/zos_data_set.py | 6 +-- plugins/modules/zos_encode.py | 21 ++++++--- plugins/modules/zos_fetch.py | 9 +++- 5 files changed, 80 insertions(+), 39 deletions(-) diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index ceeea04de..19b81e0d3 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -343,7 +343,7 @@ def backupOper(module, src, backup, tmphlq=None): """ file_type = None if data_set.is_data_set(src): - file_type = data_set.DataSet.data_set_type(src) + file_type = data_set.DataSet.data_set_type(src, tmphlq=tmphlq) else: if os.path.exists(src): file_type = 'USS' diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 3c61e40c7..f0cabf3d5 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -967,6 +967,7 @@ def __init__( asa_text=False, backup_name=None, force_lock=False, + tmphlq=None ): """Utility class to handle copying data between two targets. @@ -995,6 +996,8 @@ def __init__( Whether the dest data set should be copied into using disp=shr when is opened by another process. + tmphlq : str + High Level Qualifier for temporary datasets. Attributes ---------- @@ -1018,6 +1021,8 @@ def __init__( Whether the dest data set should be copied into using disp=shr when is opened by another process. + tmphlq : str + High Level Qualifier for temporary datasets. """ self.module = module self.is_binary = is_binary @@ -1026,6 +1031,7 @@ def __init__( self.aliases = aliases self.backup_name = backup_name self.force_lock = force_lock + self.tmphlq = tmphlq def run_command(self, cmd, **kwargs): """Wrapper for AnsibleModule.run_command. @@ -1077,7 +1083,7 @@ def copy_to_seq( copy_args["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(new_src, dest) + response = copy.copy_asa_uss2mvs(new_src, dest, tmphlq=self.tmphlq) if response.rc != 0: raise CopyOperationError( @@ -1487,6 +1493,7 @@ def __init__( aliases=False, common_file_args=None, backup_name=None, + tmphlq=None ): """Utility class to handle copying files or data sets to USS target. @@ -1505,6 +1512,8 @@ def __init__( Whether the file to be copied contains binary data. backup_name : str The USS path or data set name of destination backup. + tmphlq : str + High Level Qualifier for temporary datasets. 
Attributes ---------- @@ -1518,7 +1527,8 @@ def __init__( executable=executable, asa_text=asa_text, aliases=aliases, - backup_name=backup_name + backup_name=backup_name, + tmphlq=tmphlq ) self.common_file_args = common_file_args @@ -1865,7 +1875,7 @@ def _mvs_copy_to_uss( try: if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: if self.asa_text: - response = copy.copy_asa_mvs2uss(src, dest) + response = copy.copy_asa_mvs2uss(src, dest, tmphlq=self.tmphlq) rc = response.rc elif self.executable: try: @@ -1911,7 +1921,7 @@ def _mvs_copy_to_uss( stderr=copy_exception.response.stderr_response ) elif self.asa_text: - response = copy.copy_asa_pds2uss(src, dest) + response = copy.copy_asa_pds2uss(src, dest, tmphlq=self.tmphlq) if response.rc != 0: raise CopyOperationError( @@ -1943,6 +1953,7 @@ def __init__( asa_text=False, backup_name=None, force_lock=False, + tmphlq=None ): """ Utility class to handle copying to partitioned data sets or partitioned data set members. @@ -1960,6 +1971,8 @@ def __init__( binary data. backup_name : str The USS path or data set name of destination backup. + tmphlq : str + High Level Qualifier for temporary datasets. """ super().__init__( module, @@ -1968,7 +1981,8 @@ def __init__( aliases=aliases, asa_text=asa_text, backup_name=backup_name, - force_lock=force_lock + force_lock=force_lock, + tmphlq=tmphlq ) def copy_to_pdse( @@ -2140,7 +2154,7 @@ def copy_to_member( opts["options"] = "" if src_type == 'USS' and self.asa_text: - response = copy.copy_asa_uss2mvs(src, dest) + response = copy.copy_asa_uss2mvs(src, dest, tmphlq=self.tmphlq) rc, out, err = response.rc, response.stdout_response, response.stderr_response else: # While ASA files are just text files, we do a binary copy @@ -2334,7 +2348,8 @@ def create_seq_dataset_from_file( is_binary, asa_text, record_length=None, - volume=None + volume=None, + tmphlq=None ): """Creates a new sequential dataset with attributes suitable to copy the contents of a file into it. @@ -2353,6 +2368,8 @@ def create_seq_dataset_from_file( Whether the file has ASA control characters. volume : str, optional Volume where the data set should be. + tmphlq : str + High Level Qualifier for temporary datasets. """ src_size = os.stat(file).st_size # record_format = record_length = None @@ -2387,7 +2404,7 @@ def create_seq_dataset_from_file( volume=volume ) - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) def backup_data(ds_name, ds_type, backup_name, tmphlq=None): @@ -2601,7 +2618,8 @@ def does_destination_allow_copy( dest_type, is_uss, force, - volume=None + volume=None, + tmphlq=None ): """Checks whether or not the module can copy into the destination specified. @@ -2626,6 +2644,8 @@ def does_destination_allow_copy( Whether or not the module can replace existing destinations. volume : str, optional Volume where the destination should be. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -2644,7 +2664,7 @@ def does_destination_allow_copy( # If the destination is a sequential or VSAM data set and is empty, the module will try to use it, # otherwise, force needs to be True to continue and replace it. 
if (dest_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_VSAM) and dest_exists: - is_dest_empty = data_set.DataSet.is_empty(dest, volume) + is_dest_empty = data_set.DataSet.is_empty(dest, volume, tmphlq=tmphlq) if not (is_dest_empty or force): return False @@ -2832,7 +2852,8 @@ def allocate_destination_data_set( is_gds, is_active_gds, dest_data_set=None, - volume=None + volume=None, + tmphlq=None ): """ Allocates a new destination data set to copy into, erasing a preexistent one if @@ -2867,6 +2888,8 @@ def allocate_destination_data_set( of the new data set; they will take precedence over any other allocation logic. volume : str, optional Volume where the data set should be allocated into. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- @@ -2920,20 +2943,20 @@ def allocate_destination_data_set( del dest_params["purge"] del dest_params["extended"] del dest_params["fifo"] - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_SEQ: volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. - create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume) + create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume, tmphlq=tmphlq) else: temp_dump = None try: @@ -2949,7 +2972,8 @@ def allocate_destination_data_set( is_binary, asa_text, record_length=record_length, - volume=volume + volume=volume, + tmphlq=tmphlq ) finally: if temp_dump: @@ -2961,7 +2985,7 @@ def allocate_destination_data_set( if is_gds and not is_active_gds: data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) elif src_ds_type in data_set.DataSet.MVS_SEQ: src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. @@ -2978,7 +3002,7 @@ def allocate_destination_data_set( type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. @@ -3036,13 +3060,13 @@ def allocate_destination_data_set( volume=volume ) - data_set.DataSet.ensure_present(replace=force, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: # If dest_data_set is not available, always create the destination using the src VSAM # as a model. 
volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume) + data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume, tmphlq=tmphlq) elif dest_ds_type == "GDG": src_view = gdgs.GenerationDataGroupView(src) @@ -3360,11 +3384,11 @@ def run_module(module, arg_def): copy_handler = CopyHandler(module, is_binary=is_binary) copy_handler._tag_file_encoding(converted_src, "UTF-8") else: - if (is_src_gds and data_set.DataSet.data_set_exists(src)) or ( - not is_src_gds and data_set.DataSet.data_set_exists(src_name)): + if (is_src_gds and data_set.DataSet.data_set_exists(src, tmphlq=tmphlq)) or ( + not is_src_gds and data_set.DataSet.data_set_exists(src_name, tmphlq=tmphlq)): if src_member and not data_set.DataSet.data_set_member_exists(src): raise NonExistentSourceError(src) - src_ds_type = data_set.DataSet.data_set_type(src_name) + src_ds_type = data_set.DataSet.data_set_type(src_name, tmphlq=tmphlq) if src_ds_type not in data_set.DataSet.MVS_VSAM and src_ds_type != "GDG": src_attributes = datasets.list_datasets(src_name)[0] @@ -3403,8 +3427,8 @@ def run_module(module, arg_def): if dest_exists and not os.access(dest, os.W_OK): module.fail_json(msg="Destination {0} is not writable".format(raw_dest)) else: - dest_exists = data_set.DataSet.data_set_exists(dest_name, volume) - dest_ds_type = data_set.DataSet.data_set_type(dest_name, volume) + dest_exists = data_set.DataSet.data_set_exists(dest_name, volume, tmphlq=tmphlq) + dest_ds_type = data_set.DataSet.data_set_type(dest_name, volume, tmphlq=tmphlq) # When dealing with a new generation, we'll override its type to None # so it will be the same type as the source (or whatever dest_data_set has) @@ -3597,7 +3621,8 @@ def run_module(module, arg_def): dest_ds_type, is_uss, force, - volume + volume, + tmphlq ): module.fail_json( msg="{0} already exists on the system, unable to overwrite unless force=True is specified.".format(raw_dest), @@ -3627,7 +3652,8 @@ def run_module(module, arg_def): is_dest_gds, is_dest_gds_active, dest_data_set=dest_data_set, - volume=volume + volume=volume, + tmphlq=tmphlq ) except Exception as err: if converted_src: @@ -3655,6 +3681,7 @@ def run_module(module, arg_def): asa_text=asa_text, backup_name=backup_name, force_lock=force_lock, + tmphlq=tmphlq ) try: @@ -3677,6 +3704,7 @@ def run_module(module, arg_def): aliases=aliases, common_file_args=dict(mode=mode, group=group, owner=owner), backup_name=backup_name, + tmphlq=tmphlq ) original_checksum = None @@ -3744,6 +3772,7 @@ def run_module(module, arg_def): aliases=aliases, backup_name=backup_name, force_lock=force_lock, + tmphlq=tmphlq ) pdse_copy_handler.copy_to_pdse( diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 97bc107fd..98aae93d5 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1405,7 +1405,7 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): """ changed = False if state == "present" and data_set.data_set_type in ["member", "gdg"]: - changed = data_set.ensure_present(replace=replace) + changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq) elif state == "present": changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq, force=force) elif state == "absent" and data_set.data_set_type == "member": @@ -1415,9 +1415,9 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): elif state == 
"absent": changed = data_set.ensure_absent() elif state == "cataloged": - changed = data_set.ensure_cataloged() + changed = data_set.ensure_cataloged(tmphlq=tmp_hlq) elif state == "uncataloged": - changed = data_set.ensure_uncataloged() + changed = data_set.ensure_uncataloged(tmphlq=tmp_hlq) return changed diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index b26cbe296..fb6fdfac5 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -367,7 +367,7 @@ def check_mvs_dataset(ds, tmphlq=None): """ check_rc = False ds_type = None - if not data_set.DataSet.data_set_exists(ds): + if not data_set.DataSet.data_set_exists(ds, tmphlq=tmphlq): raise EncodeError( "Data set {0} is not cataloged, please check data set provided in" "the src option.".format(ds) @@ -544,9 +544,12 @@ def run_module(): dest_exists = False if not is_name_member: - dest_exists = data_set.DataSet.data_set_exists(src_data_set.name) + dest_exists = data_set.DataSet.data_set_exists(src_data_set.name, tmphlq=tmphlq) else: - dest_exists = data_set.DataSet.data_set_exists(data_set.extract_dsname(src_data_set.name)) + dest_exists = data_set.DataSet.data_set_exists( + data_set.extract_dsname(src_data_set.name), + tmphlq=tmphlq + ) if not dest_exists: raise EncodeError( @@ -562,7 +565,7 @@ def run_module(): )) ds_type_src = "PS" else: - ds_type_src = data_set.DataSet.data_set_type(src_data_set.name) + ds_type_src = data_set.DataSet.data_set_type(src_data_set.name, tmphlq=tmphlq) if not ds_type_src: raise EncodeError("Unable to determine data set type of {0}".format(src_data_set.raw_name)) @@ -589,9 +592,12 @@ def run_module(): is_name_member = data_set.is_member(dest_data_set.name) if not is_name_member: - dest_exists = data_set.DataSet.data_set_exists(dest_data_set.name) + dest_exists = data_set.DataSet.data_set_exists(dest_data_set.name, tmphlq=tmphlq) else: - dest_exists = data_set.DataSet.data_set_exists(data_set.extract_dsname(dest_data_set.name)) + dest_exists = data_set.DataSet.data_set_exists( + data_set.extract_dsname(dest_data_set.name), + tmphlq=tmphlq + ) if not dest_exists: raise EncodeError( @@ -602,7 +608,7 @@ def run_module(): if is_name_member: ds_type_dest = "PS" else: - ds_type_dest = data_set.DataSet.data_set_type(dest_data_set.name) + ds_type_dest = data_set.DataSet.data_set_type(dest_data_set.name, tmphlq=tmphlq) if (not is_uss_dest) and (path.sep in dest): try: @@ -675,6 +681,7 @@ def run_module(): to_encoding, src_type=ds_type_src, dest_type=ds_type_dest, + tmphlq=tmphlq ) if convert_rc: diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 92f1086fd..cd00b4216 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -885,6 +885,7 @@ def run_module(): fail_on_missing = boolean(parsed_args.get("fail_on_missing")) is_binary = boolean(parsed_args.get("is_binary")) encoding = module.params.get("encoding") + tmphlq = module.params.get("tmp_hlq") # ********************************************************** # # Check for data set existence and determine its type # @@ -906,7 +907,8 @@ def run_module(): src_exists = data_set.DataSet.data_set_member_exists(src_data_set.name) else: src_exists = data_set.DataSet.data_set_exists( - src_data_set.name + src_data_set.name, + tmphlq=tmphlq ) if not src_exists: @@ -936,7 +938,10 @@ def run_module(): if "/" in src: ds_type = "USS" else: - ds_type = data_set.DataSet.data_set_type(data_set.extract_dsname(src_data_set.name)) + ds_type = data_set.DataSet.data_set_type( + 
data_set.extract_dsname(src_data_set.name), + tmphlq=tmphlq + ) if not ds_type: module.fail_json(msg="Unable to determine source type. No data was fetched.") From 32b08ac5c2f18118b2e2dc43c9a0f504882d0342 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 11 Sep 2024 12:46:39 -0600 Subject: [PATCH 17/26] Fix tmp_hlq in zos_copy --- plugins/modules/zos_copy.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index f0cabf3d5..dd2e724cc 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -2404,7 +2404,7 @@ def create_seq_dataset_from_file( volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) def backup_data(ds_name, ds_type, backup_name, tmphlq=None): @@ -2943,7 +2943,7 @@ def allocate_destination_data_set( del dest_params["purge"] del dest_params["extended"] del dest_params["fifo"] - data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_SEQ: volumes = [volume] if volume else None data_set.DataSet.ensure_absent(dest, volumes=volumes) @@ -3002,7 +3002,7 @@ def allocate_destination_data_set( type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. @@ -3060,7 +3060,7 @@ def allocate_destination_data_set( volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmphlq=tmphlq, **dest_params) + data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif dest_ds_type in data_set.DataSet.MVS_VSAM: # If dest_data_set is not available, always create the destination using the src VSAM # as a model. 
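
Every call site touched in these patches repeats the same two-line guard before invoking mvscmd or mvscmdauth. A shared helper is not part of the series, but if one were factored out later it might look like the following hypothetical sketch:

    def _with_tmphlq(cmd, tmphlq=None):
        # Hypothetical helper, not in the patches: append ZOAU's -Q option
        # so temporary data sets allocated by mvscmd are created under the
        # requested high level qualifier.
        if tmphlq:
            cmd = "{0} -Q={1}".format(cmd, tmphlq)
        return cmd

    # Usage mirroring the hunks above:
    # cmd = _with_tmphlq("mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", tmphlq)
    # rc, stdout, stderr = module.run_command(cmd, data=stdin, errors='replace')
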
From 0460acf77455ca561098718ab206b61616e9c437 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 11 Sep 2024 15:28:15 -0600 Subject: [PATCH 18/26] Fix ensure_present for members --- plugins/modules/zos_data_set.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 98aae93d5..72d4b4d25 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1405,7 +1405,7 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): """ changed = False if state == "present" and data_set.data_set_type in ["member", "gdg"]: - changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq) + changed = data_set.ensure_present(replace=replace) elif state == "present": changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq, force=force) elif state == "absent" and data_set.data_set_type == "member": From 0ebbc2f83535054eaff3ca7610c62a6039802509 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Fri, 13 Sep 2024 10:32:20 -0600 Subject: [PATCH 19/26] Fix wrong keyword arg in dataset util --- plugins/modules/zos_data_set.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 72d4b4d25..f68d5c1ba 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1415,9 +1415,9 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): elif state == "absent": changed = data_set.ensure_absent() elif state == "cataloged": - changed = data_set.ensure_cataloged(tmphlq=tmp_hlq) + changed = data_set.ensure_cataloged(tmp_hlq=tmp_hlq) elif state == "uncataloged": - changed = data_set.ensure_uncataloged(tmphlq=tmp_hlq) + changed = data_set.ensure_uncataloged(tmp_hlq=tmp_hlq) return changed From 5a0d0a8e8a5d18c828576c9b5d32c71f531c6557 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Fri, 13 Sep 2024 10:56:29 -0600 Subject: [PATCH 20/26] Update zos_archive and zos_unarchive --- plugins/modules/zos_archive.py | 6 +++--- plugins/modules/zos_unarchive.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 52fdd9585..50288d300 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1038,7 +1038,7 @@ def find_targets(self): """Finds target datasets in host. """ for path in self.sources: - if data_set.DataSet.data_set_exists(path): + if data_set.DataSet.data_set_exists(path, tmphlq=self.tmphlq): self.targets.append(path) else: self.not_found.append(path) @@ -1148,7 +1148,7 @@ def create_dest_ds(self, name): Name of the newly created data set. """ record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) + data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) @@ -1266,7 +1266,7 @@ def dest_exists(self): bool If destination path exists. 
""" - return data_set.DataSet.data_set_exists(self.dest) + return data_set.DataSet.data_set_exists(self.dest, tmphlq=self.tmphlq) def remove_targets(self): """Removes the archived targets and changes the state accordingly. diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index f5febbf90..a80138300 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -783,7 +783,7 @@ def _restore(self, source): return rc def src_exists(self): - return data_set.DataSet.data_set_exists(self.src) + return data_set.DataSet.data_set_exists(self.src, tmphlq=self.tmphlq) def _get_restored_datasets(self, output): ds_list = list() From ac0ad93376c1e6c69ab95530cac568e8ee489735 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 18 Sep 2024 09:40:43 -0600 Subject: [PATCH 21/26] Fix tmphlq error in zos_archive --- plugins/modules/zos_archive.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index 50288d300..026c56188 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1018,6 +1018,7 @@ def __init__(self, module): High level qualifier for temporary datasets. """ super(MVSArchive, self).__init__(module) + self.tmphlq = module.params.get("tmp_hlq") self.original_checksums = self.dest_checksums() self.use_adrdssu = module.params.get("format").get("format_options").get("use_adrdssu") self.expanded_sources = self.expand_mvs_paths(self.sources) @@ -1026,7 +1027,6 @@ def __init__(self, module): self.tmp_data_sets = list() self.dest_data_set = module.params.get("dest_data_set") self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set - self.tmphlq = module.params.get("tmp_hlq") def open(self): pass From ba56c4d97df0f9850e7f16dadc8ef31b5f72cde5 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Thu, 19 Sep 2024 19:03:39 -0600 Subject: [PATCH 22/26] Add changelog fragment --- .../1695-tmp_hlq_when_calling_mvscmd.yml | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml diff --git a/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml new file mode 100644 index 000000000..c20ba4660 --- /dev/null +++ b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml @@ -0,0 +1,31 @@ +bugfixes: + - zos_apf - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_archive - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_blockinfile - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_copy - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_data_set - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_encode - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. 
+ (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_fetch - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_lineinfile - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_mount - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). + - zos_unarchive - The value of ``tmp_hlq`` would not be used when running + MVS commands. Fix now passes the value to them. + (https://github.com/ansible-collections/ibm_zos_core/pull/1695). \ No newline at end of file From ee48f20aa79ff6881dbebe1d8c8b443a6153ec2c Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Tue, 24 Sep 2024 11:53:58 -0600 Subject: [PATCH 23/26] Update changelog fragment --- .../1695-tmp_hlq_when_calling_mvscmd.yml | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml index c20ba4660..2539501ea 100644 --- a/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml +++ b/changelogs/fragments/1695-tmp_hlq_when_calling_mvscmd.yml @@ -1,31 +1,31 @@ bugfixes: - - zos_apf - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_apf - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_archive - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_archive - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_blockinfile - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_blockinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_copy - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_copy - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_data_set - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_data_set - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_encode - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_encode - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). 
- - zos_fetch - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_fetch - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_lineinfile - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_lineinfile - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_mount - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_mount - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). - - zos_unarchive - The value of ``tmp_hlq`` would not be used when running - MVS commands. Fix now passes the value to them. + - zos_unarchive - The ``tmp_hlq`` option was previously ignored and default + values were used instead. Fix now honors the value set in the module option. (https://github.com/ansible-collections/ibm_zos_core/pull/1695). \ No newline at end of file From 85b5e6b93bc0b0d5ec52df270c5dfdffac3a2376 Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Mon, 30 Sep 2024 17:45:22 -0600 Subject: [PATCH 24/26] Update dataset function calls --- plugins/module_utils/data_set.py | 9 +++++++-- plugins/modules/zos_data_set.py | 8 ++++---- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 35e2bffe6..c1740b5a4 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -2147,15 +2147,20 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False): self.set_state("present") return rc - def ensure_absent(self): + def ensure_absent(self, tmp_hlq=None): """Removes the data set. + Parameters + ---------- + tmp_hlq : str + High level qualifier for temporary datasets. + Returns ------- int Indicates if changes were made. 
""" - rc = DataSet.ensure_absent(self.name, self.volumes) + rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq) if rc == 0: self.set_state("absent") return rc diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index f68d5c1ba..514952495 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1405,15 +1405,15 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): """ changed = False if state == "present" and data_set.data_set_type in ["member", "gdg"]: - changed = data_set.ensure_present(replace=replace) + changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq) elif state == "present": changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq, force=force) elif state == "absent" and data_set.data_set_type == "member": - changed = data_set.ensure_absent(force=force) + changed = data_set.ensure_absent(force=force, tmp_hlq=tmp_hlq) elif state == "absent" and data_set.data_set_type == "gdg": - changed = data_set.ensure_absent(force=force) + changed = data_set.ensure_absent(force=force, tmp_hlq=tmp_hlq) elif state == "absent": - changed = data_set.ensure_absent() + changed = data_set.ensure_absent(tmp_hlq=tmp_hlq) elif state == "cataloged": changed = data_set.ensure_cataloged(tmp_hlq=tmp_hlq) elif state == "uncataloged": From d1ac0d6e02ebda9973c7598c0384b9aacd4eecee Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 2 Oct 2024 09:51:47 -0600 Subject: [PATCH 25/26] Fix function calls in zos_data_set --- plugins/modules/zos_data_set.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index 514952495..79a85dac3 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -1404,14 +1404,16 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): If changes were made. 
""" changed = False - if state == "present" and data_set.data_set_type in ["member", "gdg"]: - changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq) + if state == "present" and data_set.data_set_type == "member": + changed = data_set.ensure_present(replace=replace, tmphlq=tmp_hlq) + elif state == "present" and data_set.data_set_type == "gdg": + changed = data_set.ensure_present(replace=replace) elif state == "present": changed = data_set.ensure_present(replace=replace, tmp_hlq=tmp_hlq, force=force) elif state == "absent" and data_set.data_set_type == "member": - changed = data_set.ensure_absent(force=force, tmp_hlq=tmp_hlq) + changed = data_set.ensure_absent(force=force) elif state == "absent" and data_set.data_set_type == "gdg": - changed = data_set.ensure_absent(force=force, tmp_hlq=tmp_hlq) + changed = data_set.ensure_absent(force=force) elif state == "absent": changed = data_set.ensure_absent(tmp_hlq=tmp_hlq) elif state == "cataloged": From 3dc80f94dba8f3879ed502791c445652bc0ac1da Mon Sep 17 00:00:00 2001 From: Ivan Moreno Date: Wed, 2 Oct 2024 12:14:32 -0600 Subject: [PATCH 26/26] Fix missing import --- tests/functional/modules/test_zos_backup_restore.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/modules/test_zos_backup_restore.py b/tests/functional/modules/test_zos_backup_restore.py index 32b721cfb..63f110ee7 100644 --- a/tests/functional/modules/test_zos_backup_restore.py +++ b/tests/functional/modules/test_zos_backup_restore.py @@ -23,6 +23,7 @@ from re import search, IGNORECASE, MULTILINE import string import random +import time from ibm_zos_core.tests.helpers.utils import get_random_file_name DATA_SET_CONTENTS = "HELLO WORLD"